scriptBTVNb10_2.js
/*
 * Given an object `obj`, create an array containing the keys of all of
 * `obj`'s properties, uppercased.
 * Use Object.keys(), array methods, and toUpperCase().
 */
let obj = {
  b: 2,
  a: 1,
  c: 3,
};

// Expected output: ['B', 'A', 'C']
const keys = Object.keys(obj);
let a = [];
for (let i = 0; i < keys.length; i++) {
  a.push(keys[i].toUpperCase());
}
console.log(a);
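
// A more concise variant (a sketch, not part of the original exercise) using
// the array method map(); it produces the same ['B', 'A', 'C'] output:
const upperKeys = Object.keys(obj).map((key) => key.toUpperCase());
console.log(upperKeys);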
exceptions.py
# -*- coding: utf-8 -*-
# Copyright (c) 2016 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Exceptions for add_mon playbook."""

from decapod_common import exceptions as base_exceptions
"""Exception family, specific for DecapodError plugin for cluster deployment.""" class SecretWasNotFound(ClusterDeployError): """Exception raised if not secret is found""" def __init__(self, cluster_id): super().__init__( "No monitor secret is defined for cluster {0}".format(cluster_id)) class UnknownPlaybookConfiguration(ClusterDeployError): """Exception raised if playbook configuration is unknown."""
class ClusterDeployError(base_exceptions.DecapodError):
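
# A minimal usage sketch (hypothetical call site, not part of the original
# module): the deployment playbook would raise this when its secret lookup
# comes back empty.
#
#     if secret is None:
#         raise SecretWasNotFound(cluster_id)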
urls.go
package servergroups

import "github.com/huaweicloud/golangsdk"

const resourcePath = "os-server-groups"

func resourceURL(c *golangsdk.ServiceClient) string {
	return c.ServiceURL(resourcePath)
}

func listURL(c *golangsdk.ServiceClient) string {
	return resourceURL(c)
}

func createURL(c *golangsdk.ServiceClient) string {
	return resourceURL(c)
}

func getURL(c *golangsdk.ServiceClient, id string) string {
	return c.ServiceURL(resourcePath, id)
}

func actionURL(c *golangsdk.ServiceClient, id string) string {
	return c.ServiceURL("cloudservers", resourcePath, id, "action")
}

func deleteURL(c *golangsdk.ServiceClient, id string) string {
	return getURL(c, id)
}
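
// For illustration (hypothetical endpoint, not part of this file): with a
// service endpoint of https://ecs.example.com/v1/, getURL(c, "42") would
// resolve to https://ecs.example.com/v1/os-server-groups/42, and
// actionURL(c, "42") to
// https://ecs.example.com/v1/cloudservers/os-server-groups/42/action.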
rmon_t_p128to255.rs
#[doc = "Register `RMON_T_P128TO255` reader"] pub struct R(crate::R<RMON_T_P128TO255_SPEC>); impl core::ops::Deref for R { type Target = crate::R<RMON_T_P128TO255_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl From<crate::R<RMON_T_P128TO255_SPEC>> for R { #[inline(always)] fn from(reader: crate::R<RMON_T_P128TO255_SPEC>) -> Self { R(reader) } } #[doc = "Field `TXPKTS` reader - Number of 128- to 255-byte transmit packets"] pub struct TXPKTS_R(crate::FieldReader<u16, u16>); impl TXPKTS_R { #[inline(always)] pub(crate) fn new(bits: u16) -> Self { TXPKTS_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for TXPKTS_R { type Target = crate::FieldReader<u16, u16>; #[inline(always)] fn
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl R {
    #[doc = "Bits 0:15 - Number of 128- to 255-byte transmit packets"]
    #[inline(always)]
    pub fn txpkts(&self) -> TXPKTS_R {
        TXPKTS_R::new((self.bits & 0xffff) as u16)
    }
}
#[doc = "Tx 128- to 255-byte Packets Statistic Register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rmon_t_p128to255](index.html) module"]
pub struct RMON_T_P128TO255_SPEC;
impl crate::RegisterSpec for RMON_T_P128TO255_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [rmon_t_p128to255::R](R) reader structure"]
impl crate::Readable for RMON_T_P128TO255_SPEC {
    type Reader = R;
}
#[doc = "`reset()` method sets RMON_T_P128TO255 to value 0"]
impl crate::Resettable for RMON_T_P128TO255_SPEC {
    #[inline(always)]
    fn reset_value() -> Self::Ux {
        0
    }
}
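
// Usage sketch (assumes the usual svd2rust peripheral access pattern and a
// hypothetical `enet` peripheral handle owning this register block):
//
//     let pkts: u16 = enet.rmon_t_p128to255.read().txpkts().bits();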
APISectionTypes.tsx
import React from 'react';
import ReactMarkdown from 'react-markdown';

import { InlineCode } from '~/components/base/code';
import { UL, LI } from '~/components/base/list';
import { B, P } from '~/components/base/paragraph';
import { H2, H3Code, H4 } from '~/components/plugins/Headings';
import {
  PropData,
  TypeDeclarationContentData,
  TypeDefinitionData,
  TypeGeneralData,
  TypeSignaturesData,
} from '~/components/plugins/api/APIDataTypes';
import {
  mdInlineRenderers,
  mdRenderers,
  resolveTypeName,
  renderFlags,
  renderParam,
  CommentTextBlock,
  parseCommentContent,
  renderTypeOrSignatureType,
  getCommentOrSignatureComment,
} from '~/components/plugins/api/APISectionUtils';

export type APISectionTypesProps = {
  data: TypeGeneralData[];
};

const defineLiteralType = (types: TypeDefinitionData[]): JSX.Element | null => {
  const uniqueTypes = Array.from(
    new Set(types.map((t: TypeDefinitionData) => t.value && typeof t.value))
  );
  if (uniqueTypes.length === 1 && uniqueTypes.filter(Boolean).length === 1) {
    return (
      <>
        <InlineCode>{uniqueTypes[0]}</InlineCode>
        {' - '}
      </>
    );
  }
  return null;
};

const renderTypeDeclarationTable = ({ children }: TypeDeclarationContentData): JSX.Element => (
  <table key={`type-declaration-table-${children?.map(child => child.name).join('-')}`}>
    <thead>
      <tr>
        <th>Name</th>
        <th>Type</th>
        <th>Description</th>
      </tr>
    </thead>
    <tbody>{children?.map(renderTypePropertyRow)}</tbody>
  </table>
);

const renderTypePropertyRow = ({
  name,
  flags,
  type,
  comment,
  defaultValue,
  signatures,
}: PropData): JSX.Element => {
  const initValue = defaultValue || comment?.tags?.filter(tag => tag.tag === 'default')[0]?.text;
  const commentData = getCommentOrSignatureComment(comment, signatures);
  return (
    <tr key={name}>
      <td>
        <B>{name}</B>
        {renderFlags(flags)}
      </td>
      <td>{renderTypeOrSignatureType(type, signatures)}</td>
      <td>
        {commentData ? (
          <CommentTextBlock comment={commentData} renderers={mdInlineRenderers} />
        ) : (
          '-'
        )}
        {initValue ? (
          <>
            <br />
            <ReactMarkdown renderers={mdInlineRenderers}>{`__Default:__ ${parseCommentContent(
              initValue
            )}`}</ReactMarkdown>
          </>
        ) : null}
      </td>
    </tr>
  );
};

const renderType = ({ name, comment, type }: TypeGeneralData): JSX.Element | undefined => {
  if (type.declaration) {
    // Object Types
    return (
      <div key={`type-definition-${name}`}>
        <H3Code>
          <InlineCode>
            {name}
            {type.declaration.signatures ? '()' : ''}
          </InlineCode>
        </H3Code>
        <CommentTextBlock comment={comment} />
        {type.declaration.children && renderTypeDeclarationTable(type.declaration)}
        {type.declaration.signatures
          ? type.declaration.signatures.map(({ parameters }: TypeSignaturesData) => (
              <div key={`type-definition-signature-${name}`}>
                {parameters ? <H4>Arguments</H4> : null}
                {parameters ? <UL>{parameters?.map(renderParam)}</UL> : null}
              </div>
            ))
          : null}
      </div>
    );
  } else if (type.types && ['union', 'intersection'].includes(type.type)) {
    const literalTypes = type.types.filter((t: TypeDefinitionData) =>
      ['literal', 'intrinsic', 'reference', 'tuple'].includes(t.type)
    );
    const propTypes = type.types.filter((t: TypeDefinitionData) => t.type === 'reflection');
    if (propTypes.length) {
      return (
        <div key={`prop-type-definition-${name}`}>
          <H3Code>
            <InlineCode>{name}</InlineCode>
          </H3Code>
          {type.type === 'intersection' ? (
            <P>
              <InlineCode>
                {type.types.filter(type => type.type === 'reference').map(resolveTypeName)}
              </InlineCode>{' '}
              extended by:
            </P>
          ) : null}
          <CommentTextBlock comment={comment} />
          {propTypes.map(
            propType =>
              propType?.declaration?.children && renderTypeDeclarationTable(propType.declaration)
          )}
        </div>
      );
    } else if (literalTypes.length) {
      return (
        <div key={`type-definition-${name}`}>
          <H3Code>
            <InlineCode>{name}</InlineCode>
          </H3Code>
          <CommentTextBlock comment={comment} />
          <P>
            {defineLiteralType(literalTypes)}
            Acceptable values are:{' '}
            {literalTypes.map((lt, index) => (
              <span key={`${name}-literal-type-${index}`}>
                <InlineCode>{resolveTypeName(lt)}</InlineCode>
                {index + 1 !== literalTypes.length ? ', ' : '.'}
              </span>
            ))}
          </P>
        </div>
      );
    }
  } else if ((type.name === 'Record' && type.typeArguments) || type.type === 'reference') {
    return (
      <div key={`record-definition-${name}`}>
        <H3Code>
          <InlineCode>{name}</InlineCode>
        </H3Code>
        <UL>
          <LI>
            <InlineCode>{resolveTypeName(type)}</InlineCode>
          </LI>
        </UL>
        <CommentTextBlock comment={comment} />
      </div>
    );
  } else if (type.type === 'intrinsic') {
    return (
      <div key={`generic-type-definition-${name}`}>
        <H3Code>
          <InlineCode>{name}</InlineCode>
        </H3Code>
        <CommentTextBlock comment={comment} />
        <ReactMarkdown renderers={mdRenderers}>{'__Type:__ `' + type.name + '`'}</ReactMarkdown>
      </div>
    );
  }
  return undefined;
};

const APISectionTypes: React.FC<APISectionTypesProps> = ({ data }) =>
  data?.length ? (
    <>
      <H2 key="types-header">Types</H2>
      {data.map(renderType)}
    </>
  ) : null;

export default APISectionTypes;
vec1.rs
// vec1.rs
// Your task is to create a `Vec` which holds the exact same elements
// as in the array `a`.
// Make me compile and pass the test!
// Execute the command `rustlings hint vec1` if you need hints.

fn array_and_vec() -> ([i32; 4], Vec<i32>) {
    let a = [10, 20, 30, 40]; // a plain array
    let v = vec![10, 20, 30, 40]; // the vector, declared with the `vec!` macro

    return (a, v);
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_array_and_vec_similarity() {
        let (a, v) = array_and_vec();
        assert_eq!(a, v[..]);
    }
}
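
// An equivalent construction (a sketch, not part of the exercise): Rust
// implements From<[T; N]> for Vec<T>, so the vector can also be built
// directly from the array.
//
//     let v: Vec<i32> = Vec::from(a);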
hash_sample.go
// Copyright (c) 2018 Lorenzo Alberton
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

package processor

import (
	"math"

	"github.com/OneOfOne/xxhash"

	"github.com/Jeffail/benthos/lib/log"
	"github.com/Jeffail/benthos/lib/metrics"
	"github.com/Jeffail/benthos/lib/types"
)

//------------------------------------------------------------------------------

func init() {
	Constructors["hash_sample"] = TypeSpec{
		constructor: NewHashSample,
		description: `
Passes on a percentage of messages deterministically by hashing selected parts
of the message and checking the hash against a valid range, dropping all
others.

For example, a 'hash_sample' with 'retain_min' of 0.0 and 'retain_max' of 50.0
will receive half of the input stream, and a 'hash_sample' with 'retain_min' of
50.0 and 'retain_max' of 100.1 will receive the other half.

The part indexes can be negative, and if so the part will be selected from the
end counting backwards starting from -1. E.g. if index = -1 then the selected
part will be the last part of the message, if index = -2 then the part before
the last element will be selected, and so on.`,
	}
}

//------------------------------------------------------------------------------

// hashSamplingNorm is the constant factor to normalise a uint64 into the
// (0.0, 100.0) range.
const hashSamplingNorm = 100.0 / float64(math.MaxUint64)

func scaleNum(n uint64) float64 {
	return float64(n) * hashSamplingNorm
}

//------------------------------------------------------------------------------

// HashSampleConfig contains any configuration for the HashSample processor.
type HashSampleConfig struct {
	RetainMin float64 `json:"retain_min" yaml:"retain_min"`
	RetainMax float64 `json:"retain_max" yaml:"retain_max"`
	Parts     []int   `json:"parts" yaml:"parts"` // message parts to hash
}

// NewHashSampleConfig returns a HashSampleConfig with default values.
func NewHashSampleConfig() HashSampleConfig {
	return HashSampleConfig{
		RetainMin: 0.0,
		RetainMax: 10.0, // retain the first [0, 10%) interval
		Parts:     []int{0}, // only consider the 1st part
	}
}

//------------------------------------------------------------------------------

// HashSample is a processor that checks each message against a set of bounds
// and rejects messages if they aren't within them.
type HashSample struct {
	conf  Config
	log   log.Modular
	stats metrics.Type

	mCount     metrics.StatCounter
	mDropOOB   metrics.StatCounter
	mDropped   metrics.StatCounter
	mErrHash   metrics.StatCounter
	mSent      metrics.StatCounter
	mSentParts metrics.StatCounter
}

// NewHashSample returns a HashSample processor.
func NewHashSample(
	conf Config, mgr types.Manager, log log.Modular, stats metrics.Type,
) (Type, error) {
	return &HashSample{
		conf:  conf,
		log:   log.NewModule(".processor.hash_sample"),
		stats: stats,

		mCount:     stats.GetCounter("processor.hash_sample.count"),
		mDropOOB:   stats.GetCounter("processor.hash_sample.dropped_part_out_of_bounds"),
		mDropped:   stats.GetCounter("processor.hash_sample.dropped"),
		mErrHash:   stats.GetCounter("processor.hash_sample.hashing_error"),
		mSent:      stats.GetCounter("processor.hash_sample.sent"),
		mSentParts: stats.GetCounter("processor.hash_sample.parts.sent"),
	}, nil
}

//------------------------------------------------------------------------------

// ProcessMessage checks each message against a set of bounds.
func (s *HashSample) ProcessMessage(msg types.Message) ([]types.Message, types.Response) {
	s.mCount.Incr(1)

	hash := xxhash.New64()

	lParts := msg.Len()
	for _, index := range s.conf.HashSample.Parts {
		if index < 0 {
			// Negative indexes count backwards from the end.
			index = lParts + index
		}

		// Check boundary of part index.
		if index < 0 || index >= lParts {
			s.mDropOOB.Incr(1)
			s.mDropped.Incr(1)
			s.log.Debugf("Cannot sample message part %v for parts count: %v\n", index, lParts)
			return nil, types.NewSimpleResponse(nil)
		}

		// Attempt to add part to hash.
		if _, err := hash.Write(msg.Get(index)); nil != err {
			s.mErrHash.Incr(1)
			s.log.Debugf("Cannot hash message part for sampling: %v\n", err)
			return nil, types.NewSimpleResponse(nil)
		}
	}

	rate := scaleNum(hash.Sum64())

	if rate >= s.conf.HashSample.RetainMin && rate < s.conf.HashSample.RetainMax {
		s.mSent.Incr(1)
		s.mSentParts.Incr(int64(msg.Len()))
		msgs := [1]types.Message{msg}
		return msgs[:], nil
	}

	s.mDropped.Incr(1)
	return nil, types.NewSimpleResponse(nil)
}

//------------------------------------------------------------------------------
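
// A minimal configuration sketch (hypothetical values; assumes the package's
// top-level Config constructor and the HashSample field used in
// ProcessMessage above):
//
//     conf := NewConfig()
//     conf.HashSample.RetainMin = 0.0
//     conf.HashSample.RetainMax = 50.0 // deterministically keep ~half the stream
//     conf.HashSample.Parts = []int{0} // hash only the first message part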
array_parser.go
package types

import (
	"bufio"
	"bytes"
	"errors"
	"fmt"
	"io"
)

var endOfArray = errors.New("types: end of array")

type arrayParser struct {
	p streamingParser

	valid     bool
	stickyErr error
}

func newArrayParser(rd Reader) *arrayParser {
	c, err := rd.ReadByte()
	if err != nil {
		return &arrayParser{
			stickyErr: err,
		}
	}
	if c != '{' {
		return &arrayParser{
			stickyErr: fmt.Errorf("pg: expecting '{', got %q", c),
		}
	}
	return &arrayParser{
		p:         newStreamingParser(rd),
		stickyErr: err,
	}
}

func (p *arrayParser) Valid() bool {
	if p.stickyErr != nil {
		return false
	}
	return p.p.Buffered() > 0
}

func (p *arrayParser) NextElem() ([]byte, error) {
	if p.stickyErr != nil {
		return nil, p.stickyErr
	}

	c, err := p.p.ReadByte()
	if err != nil {
		return nil, err
	}

	switch c {
	case '"':
		b, err := p.p.ReadSubstring()
		if err != nil {
			return nil, err
		}
		err = p.readCommaBrace()
		if err != nil {
			return nil, err
		}
		return b, nil
	case '{':
		b, err := p.readSubArray()
		if err != nil {
			return nil, err
		}
		err = p.readCommaBrace()
		if err != nil {
			return nil, err
		}
		return b, nil
	case '}':
		return nil, endOfArray
	default:
		err = p.p.UnreadByte()
		if err != nil {
			return nil, err
		}

		var b []byte
		for {
			bb, err := p.p.ReadSlice(',')
			if b == nil {
				b = bb[:len(bb):len(bb)]
			} else {
				b = append(b, bb...)
			}
			if err == nil {
				b = b[:len(b)-1]
				break
			}
			if err == bufio.ErrBufferFull {
				continue
			}
			if err == io.EOF {
				if b[len(b)-1] == '}' {
					b = b[:len(b)-1]
					break
				}
			}
			return nil, err
		}

		if bytes.Equal(b, []byte("NULL")) {
			return nil, nil
		}
		return b, nil
	}
}

func (p *arrayParser) readSubArray() ([]byte, error) {
	var b []byte
	b = append(b, '{')
	for {
		c, err := p.p.ReadByte()
		if err != nil {
			return nil, err
		}

		switch c {
		case '"':
			b = append(b, '"')
			for {
				bb, err := p.p.ReadSlice('"')
				b = append(b, bb...)
				if err != nil {
					return nil, err
				}
				if len(b) > 1 && b[len(b)-2] != '\\' {
					break
				}
			}
		case '}':
			b = append(b, '}')
			return b, nil
		default:
			b = append(b, c)
		}
	}
}

func (p *arrayParser) readCommaBrace() error {
	c, err := p.p.ReadByte()
	if err != nil {
		return err
	}

	switch c {
	case ',', '}':
		return nil
	default:
		return fmt.Errorf("pg: got %q, wanted ',' or '}'", c)
	}
}
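
// Usage sketch (hypothetical reader over a Postgres array literal such as
// {foo,bar,NULL}; Reader and streamingParser are defined elsewhere in this
// package):
//
//     p := newArrayParser(rd)
//     for {
//         elem, err := p.NextElem()
//         if err == endOfArray {
//             break
//         }
//         // elem is nil for NULL elements
//     }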
cfgr.rs
/// Timer clock prescaler.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)]
pub enum Prescaler {
    /// /1
    Div1 = 0b000,
    /// /2
    Div2 = 0b001,
    /// /4
    Div4 = 0b010,
    /// /8
    Div8 = 0b011,
    /// /16
    Div16 = 0b100,
    /// /32
    Div32 = 0b101,
    /// /64
    Div64 = 0b110,
    /// /128
    Div128 = 0b111,
}

impl Default for Prescaler {
    /// Reset value of the prescaler.
    fn default() -> Self {
        Prescaler::Div1
    }
}

/// LPTIM1 and LPTIM2 trigger selection.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)]
pub enum TrgSel {
    /// RTC alarm A.
    RtcAlarmA = 0b001,
    /// RTC alarm B.
    RtcAlarmB = 0b010,
    /// TAMP1 input detection.
    Tamp1 = 0b011,
    /// TAMP2 input detection.
    Tamp2 = 0b100,
    /// TAMP3 input detection.
    Tamp3 = 0b101,
    /// COMP1_OUT.
    Comp1 = 0b110,
    /// COMP2_OUT.
    Comp2 = 0b111,
}

impl From<TrgSel> for u32 {
    fn from(sel: TrgSel) -> Self {
        sel as u32
    }
}

/// LPTIM3 trigger selection.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)]
pub enum TrgSel3 {
    /// LPTIM1_OUT.
    LpTim1 = 0b001,
    /// LPTIM2_OUT.
    LpTim2 = 0b010,
}

impl From<TrgSel3> for u32 {
    fn from(sel: TrgSel3) -> Self {
        sel as u32
    }
}

/// Trigger polarity.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)]
pub enum TrgPol {
    /// Software trigger.
    Soft = 0b00,
    /// Rising edge is active edge.
    Rising = 0b01,
    /// Falling edge is active edge.
    Falling = 0b10,
    /// Both edges are active edges.
    Both = 0b11,
}

/// Filter for triggers and external clocks.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)]
pub enum Filter {
    /// Any level change is considered valid.
    Any = 0b00,
    /// Level must be stable for at least 2 clock periods
    /// before it is considered as valid.
    Clk2 = 0b01,
    /// Level must be stable for at least 4 clock periods
    /// before it is considered as valid.
    Clk4 = 0b10,
    /// Level must be stable for at least 8 clock periods
    /// before it is considered as valid.
    Clk8 = 0b11,
}

impl Prescaler {
    /// Get the prescaler divisor.
    ///
    /// # Example
    ///
    /// ```
    /// use stm32wl_hal::lptim::Prescaler;
    ///
    /// assert_eq!(Prescaler::Div1.div(), 1);
    /// assert_eq!(Prescaler::Div2.div(), 2);
    /// assert_eq!(Prescaler::Div4.div(), 4);
    /// assert_eq!(Prescaler::Div8.div(), 8);
    /// assert_eq!(Prescaler::Div16.div(), 16);
    /// assert_eq!(Prescaler::Div32.div(), 32);
    /// assert_eq!(Prescaler::Div64.div(), 64);
    /// assert_eq!(Prescaler::Div128.div(), 128);
    /// ```
    pub const fn div(&self) -> u8 {
        match self {
            Prescaler::Div1 => 1,
            Prescaler::Div2 => 2,
            Prescaler::Div4 => 4,
            Prescaler::Div8 => 8,
            Prescaler::Div16 => 16,
            Prescaler::Div32 => 32,
            Prescaler::Div64 => 64,
            Prescaler::Div128 => 128,
        }
    }
}

/// Configuration register.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub struct Cfgr {
    val: u32,
}

impl Cfgr {
    /// Reset value of the register.
    ///
    /// # Example
    ///
    /// ```
    /// use stm32wl_hal::lptim::Cfgr;
    /// assert_eq!(Cfgr::RESET.raw(), 0);
    /// ```
    pub const RESET: Cfgr = Cfgr::new(0);

    /// Create a new Cfgr register from a raw value.
    ///
    /// # Example
    ///
    /// ```
    /// use stm32wl_hal::lptim::Cfgr;
    /// const CFGR: Cfgr = Cfgr::new(0);
    /// ```
    pub const fn new(val: u32) -> Cfgr {
        Cfgr { val }
    }

    /// Get the raw value of the register.
    ///
    /// # Example
    ///
    /// ```
    /// use stm32wl_hal::lptim::Cfgr;
    /// const CFGR: Cfgr = Cfgr::new(0x1234_5678);
    /// assert_eq!(CFGR.raw(), 0x1234_5678);
    /// ```
    pub const fn raw(self) -> u32 {
        self.val
    }

    /// Set the trigger polarity.
    #[inline]
    #[must_use = "set_trg_pol returns a modified Cfgr"]
    pub fn set_trg_pol(mut self, trg_pol: TrgPol) -> Self {
        self.val &= !(0b11 << 17);
        self.val |= (trg_pol as u32) << 17;
        self
    }

    /// Set the trigger source.
    #[inline]
    #[must_use = "set_trg_sel returns a modified Cfgr"]
    pub const fn set_trg_sel(mut self, trigger: u32) -> Self {
        self.val &= !(0b111 << 13);
        self.val |= (trigger & 0b111) << 13;
        self
    }

    /// Set the trigger filter.
    #[inline]
    #[must_use = "set_trg_filter returns a modified Cfgr"]
    pub const fn set_trg_filter(mut self, filter: Filter) -> Self {
        self.val &= !(0b111 << 6);
        self.val |= ((filter as u32) & 0b111) << 6;
        self
    }

    /// Get the prescaler value.
    ///
    /// # Example
    ///
    /// ```
    /// use stm32wl_hal::lptim::{Cfgr, Prescaler};
    ///
    /// assert_eq!(Cfgr::default().prescaler(), Prescaler::default());
    /// ```
    #[inline]
    pub const fn prescaler(&self) -> Prescaler {
        match (self.val >> 9) & 0b111 {
            0b000 => Prescaler::Div1,
            0b001 => Prescaler::Div2,
            0b010 => Prescaler::Div4,
            0b011 => Prescaler::Div8,
            0b100 => Prescaler::Div16,
            0b101 => Prescaler::Div32,
            0b110 => Prescaler::Div64,
            _ => Prescaler::Div128,
        }
    }

    /// Set the prescaler value.
    ///
    /// # Example
    ///
    /// ```
    /// use stm32wl_hal::lptim::{Cfgr, Prescaler};
    ///
    /// let cfgr: Cfgr = Cfgr::RESET;
    ///
    /// let cfgr: Cfgr = cfgr.set_prescaler(Prescaler::Div1);
    /// assert_eq!(cfgr.prescaler(), Prescaler::Div1);
    ///
    /// let cfgr: Cfgr = cfgr.set_prescaler(Prescaler::Div2);
    /// assert_eq!(cfgr.prescaler(), Prescaler::Div2);
    ///
    /// let cfgr: Cfgr = cfgr.set_prescaler(Prescaler::Div4);
    /// assert_eq!(cfgr.prescaler(), Prescaler::Div4);
    ///
    /// let cfgr: Cfgr = cfgr.set_prescaler(Prescaler::Div8);
    /// assert_eq!(cfgr.prescaler(), Prescaler::Div8);
    ///
    /// let cfgr: Cfgr = cfgr.set_prescaler(Prescaler::Div16);
    /// assert_eq!(cfgr.prescaler(), Prescaler::Div16);
    ///
    /// let cfgr: Cfgr = cfgr.set_prescaler(Prescaler::Div32);
    /// assert_eq!(cfgr.prescaler(), Prescaler::Div32);
    ///
    /// let cfgr: Cfgr = cfgr.set_prescaler(Prescaler::Div64);
    /// assert_eq!(cfgr.prescaler(), Prescaler::Div64);
    ///
    /// let cfgr: Cfgr = cfgr.set_prescaler(Prescaler::Div128);
    /// assert_eq!(cfgr.prescaler(), Prescaler::Div128);
    /// ```
    #[inline]
    #[must_use = "set_prescaler returns a modified Cfgr"]
    pub const fn set_prescaler(mut self, pres: Prescaler) -> Self {
        self.val &= !(0b111 << 9);
        self.val |= (pres as u32) << 9;
        self
    }
}

impl From<u32> for Cfgr {
    fn from(val: u32) -> Self {
        Self { val }
    }
}

impl From<Cfgr> for u32 {
    fn from(cr: Cfgr) -> Self {
        cr.val
    }
}

impl Default for Cfgr {
    fn default() -> Self {
        Cfgr::RESET
    }
}
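
// Usage sketch (hypothetical values): the setters consume and return Cfgr, so
// a register value can be composed in a single chain.
//
//     let cfgr = Cfgr::RESET
//         .set_prescaler(Prescaler::Div32)
//         .set_trg_filter(Filter::Clk4)
//         .set_trg_pol(TrgPol::Rising);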
move_semantics4.rs
// move_semantics4.rs
// Refactor this code so that instead of having `vec0` and creating the vector
// in `fn main`, we instead create it within `fn fill_vec` and transfer the
// freshly created vector from fill_vec to its caller.
// Execute `rustlings hint move_semantics4` for hints!

fn main() {
    let mut vec1 = fill_vec();

    println!("{} has length {} content `{:?}`", "vec1", vec1.len(), vec1);

    vec1.push(88);

    println!("{} has length {} content `{:?}`", "vec1", vec1.len(), vec1);
}

// `fill_vec()` no longer takes `vec: Vec<i32>` as an argument
fn fill_vec() -> Vec<i32> {
    let mut vec = Vec::<i32>::new();

    vec.push(22);
    vec.push(44);
    vec.push(66);

    vec
}
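
// Note (a sketch of the reasoning, not part of the exercise): `fill_vec` owns
// the vector it creates and moves that ownership to the caller by value on
// return; no borrowing or cloning is involved.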
koi8_u.rs
// AUTOGENERATED FROM index-koi8-u.txt, ORIGINAL COMMENT FOLLOWS:
//
// Any copyright is dedicated to the Public Domain.
// https://creativecommons.org/publicdomain/zero/1.0/
//
// For details on index index-koi8-u.txt see the Encoding Standard
// https://encoding.spec.whatwg.org/
//
// Identifier: f9609f62c683e8f4ee8e9c68c6a6e18b6043b381e6f7d512c53fcaf9894e3a94
// Date: 2014-12-19

static FORWARD_TABLE: &'static [u16] = &[
    9472, 9474, 9484, 9488, 9492, 9496, 9500, 9508, 9516, 9524, 9532, 9600, 9604, 9608, 9612, 9616,
    9617, 9618, 9619, 8992, 9632, 8729, 8730, 8776, 8804, 8805, 160, 8993, 176, 178, 183, 247,
    9552, 9553, 9554, 1105, 1108, 9556, 1110, 1111, 9559, 9560, 9561, 9562, 9563, 1169, 9565, 9566,
    9567, 9568, 9569, 1025, 1028, 9571, 1030, 1031, 9574, 9575, 9576, 9577, 9578, 1168, 9580, 169,
    1102, 1072, 1073, 1094, 1076, 1077, 1092, 1075, 1093, 1080, 1081, 1082, 1083, 1084, 1085, 1086,
    1087, 1103, 1088, 1089, 1090, 1091, 1078, 1074, 1100, 1099, 1079, 1096, 1101, 1097, 1095, 1098,
    1070, 1040, 1041, 1062, 1044, 1045, 1060, 1043, 1061, 1048, 1049, 1050, 1051, 1052, 1053, 1054,
    1055, 1071, 1056, 1057, 1058, 1059, 1046, 1042, 1068, 1067, 1047, 1064, 1069, 1065, 1063, 1066,
];

/// Returns the index code point for pointer `code` in this index.
#[inline]
pub fn forward
(code: u8) -> u16 { FORWARD_TABLE[(code - 0x80) as usize] } static BACKWARD_TABLE_LOWER: &'static [u8] = &[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 154, 0, 0, 0, 0, 0, 0, 0, 0, 191, 0, 0, 0, 0, 0, 0, 156, 0, 157, 0, 0, 0, 0, 158, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 159, 0, 0, 0, 0, 0, 0, 0, 0, 0, 179, 0, 0, 180, 0, 182, 183, 0, 0, 0, 0, 0, 0, 0, 0, 225, 226, 247, 231, 228, 229, 246, 250, 233, 234, 235, 236, 237, 238, 239, 240, 242, 243, 244, 245, 230, 232, 227, 254, 251, 253, 255, 249, 248, 252, 224, 241, 193, 194, 215, 199, 196, 197, 214, 218, 201, 202, 203, 204, 205, 206, 207, 208, 210, 211, 212, 213, 198, 200, 195, 222, 219, 221, 223, 217, 216, 220, 192, 209, 0, 163, 0, 0, 164, 0, 166, 167, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 189, 173, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 149, 150, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 151, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 152, 153, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 147, 155, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 0, 129, 0, 0, 0, 0, 0, 0, 0, 0, 0, 130, 0, 0, 0, 131, 0, 0, 0, 132, 0, 0, 0, 133, 0, 0, 0, 134, 0, 0, 0, 0, 0, 0, 0, 135, 0, 0, 0, 0, 0, 0, 0, 136, 0, 0, 0, 0, 0, 0, 0, 137, 0, 0, 0, 0, 0, 0, 0, 138, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 160, 161, 162, 0, 165, 0, 0, 168, 169, 170, 171, 172, 0, 174, 175, 176, 177, 178, 0, 181, 0, 0, 184, 185, 186, 187, 188, 0, 190, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 139, 0, 0, 0, 140, 0, 0, 0, 141, 0, 0, 0, 142, 0, 0, 0, 143, 144, 145, 146, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 148, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ]; static BACKWARD_TABLE_UPPER: &'static [u16] = &[ 0, 0, 0, 0, 0, 32, 0, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 96, 128, 160, 0, 192, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 224, 0, 256, 288, 0, 0, 0, 0, 0, 320, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 352, 384, 416, 448, 480, 512, ]; /// Returns the index pointer for code point `code` in this index. #[inline] pub fn backward(code: u32) -> u8 { let offset = (code >> 5) as usize; let offset = if offset < 302 {BACKWARD_TABLE_UPPER[offset] as usize} else {0}; BACKWARD_TABLE_LOWER[offset + ((code & 31) as usize)] } #[cfg(feature = "enclave_unit_test")] single_byte_tests!( mod = koi8_u );
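
// Usage sketch: forward(0xb3) reads FORWARD_TABLE[0xb3 - 0x80] and yields
// 1025 (U+0401, 'Ё'); backward(1025) maps the code point back to pointer 0xb3.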
p2p-fullblocktest.py
#!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test block processing. This reimplements tests from the bitcoinj/FullBlockTestGenerator used by the pull-tester. We use the testing framework in which we expect a particular answer from each test. """ from test_framework.test_framework import ComparisonTestFramework from test_framework.util import * from test_framework.comptool import TestManager, TestInstance, RejectResult from test_framework.blocktools import * from test_framework.key import CECKey from test_framework.script import * import struct class PreviousSpendableOutput(object): def __init__(self, tx = CTransaction(), n = -1): self.tx = tx self.n = n # the output we're spending # Use this class for tests that require behavior other than normal "mininode" behavior. # For now, it is used to serialize a bloated varint (b64). class CBrokenBlock(CBlock):
class FullBlockTest(ComparisonTestFramework): # Can either run this test as 1 node with expected answers, or two and compare them. # Change the "outcome" variable from each TestInstance object to only do the comparison. def __init__(self): super().__init__() self.num_nodes = 1 self.block_heights = {} self.coinbase_key = CECKey() self.coinbase_key.set_secretbytes(b"horsebattery") self.coinbase_pubkey = self.coinbase_key.get_pubkey() self.tip = None self.blocks = {} def setup_network(self): # Must set '-dip3params=2000:2000' to create pre-dip3 blocks only self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, extra_args=[['-whitelist=127.0.0.1', '-dip3params=2000:2000']], binary=[self.options.testbinary]) def add_options(self, parser): super().add_options(parser) parser.add_option("--runbarelyexpensive", dest="runbarelyexpensive", default=True) def run_test(self): self.test = TestManager(self, self.options.tmpdir) self.test.add_all_connections(self.nodes) NetworkThread().start() # Start up network handling in another thread sync_masternodes(self.nodes, True) self.test.run() def add_transactions_to_block(self, block, tx_list): [ tx.rehash() for tx in tx_list ] block.vtx.extend(tx_list) # this is a little handier to use than the version in blocktools.py def create_tx(self, spend_tx, n, value, script=CScript([OP_TRUE])): tx = create_transaction(spend_tx, n, b"", value, script) return tx # sign a transaction, using the key we know about # this signs input 0 in tx, which is assumed to be spending output n in spend_tx def sign_tx(self, tx, spend_tx, n): scriptPubKey = bytearray(spend_tx.vout[n].scriptPubKey) if (scriptPubKey[0] == OP_TRUE): # an anyone-can-spend tx.vin[0].scriptSig = CScript() return (sighash, err) = SignatureHash(spend_tx.vout[n].scriptPubKey, tx, 0, SIGHASH_ALL) tx.vin[0].scriptSig = CScript([self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))]) def create_and_sign_transaction(self, spend_tx, n, value, script=CScript([OP_TRUE])): tx = self.create_tx(spend_tx, n, value, script) self.sign_tx(tx, spend_tx, n) tx.rehash() return tx def next_block(self, number, spend=None, additional_coinbase_value=0, script=CScript([OP_TRUE]), solve=True): if self.tip == None: base_block_hash = self.genesis_hash block_time = get_mocktime() + 1 else: base_block_hash = self.tip.sha256 block_time = self.tip.nTime + 1 # First create the coinbase height = self.block_heights[base_block_hash] + 1 coinbase = create_coinbase(height, self.coinbase_pubkey) coinbase.vout[0].nValue += additional_coinbase_value coinbase.rehash() if spend == None: block = create_block(base_block_hash, coinbase, block_time) else: coinbase.vout[0].nValue += spend.tx.vout[spend.n].nValue - 1 # all but one satoshi to fees coinbase.rehash() block = create_block(base_block_hash, coinbase, block_time) tx = create_transaction(spend.tx, spend.n, b"", 1, script) # spend 1 satoshi self.sign_tx(tx, spend.tx, spend.n) self.add_transactions_to_block(block, [tx]) block.hashMerkleRoot = block.calc_merkle_root() if solve: block.solve() self.tip = block self.block_heights[block.sha256] = height assert number not in self.blocks self.blocks[number] = block return block def get_tests(self): self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16) self.block_heights[self.genesis_hash] = 0 spendable_outputs = [] # save the current tip so it can be spent by a later block def save_spendable_output(): spendable_outputs.append(self.tip) # get an output that we previously marked as spendable def get_spendable_output(): return 
PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0) # returns a test case that asserts that the current tip was accepted def accepted(): return TestInstance([[self.tip, True]]) # returns a test case that asserts that the current tip was rejected def rejected(reject = None): if reject is None: return TestInstance([[self.tip, False]]) else: return TestInstance([[self.tip, reject]]) # move the tip back to a previous block def tip(number): self.tip = self.blocks[number] # adds transactions to the block and updates state def update_block(block_number, new_transactions): block = self.blocks[block_number] self.add_transactions_to_block(block, new_transactions) old_sha256 = block.sha256 block.hashMerkleRoot = block.calc_merkle_root() block.solve() # Update the internal state just like in next_block self.tip = block if block.sha256 != old_sha256: self.block_heights[block.sha256] = self.block_heights[old_sha256] del self.block_heights[old_sha256] self.blocks[block_number] = block return block # shorthand for functions block = self.next_block create_tx = self.create_tx create_and_sign_tx = self.create_and_sign_transaction # these must be updated if consensus changes MAX_BLOCK_SIGOPS = 20000 # Create a new block block(0) save_spendable_output() yield accepted() # Now we need that block to mature so we can spend the coinbase. test = TestInstance(sync_every_block=False) for i in range(99): block(5000 + i) test.blocks_and_transactions.append([self.tip, True]) save_spendable_output() yield test # collect spendable outputs now to avoid cluttering the code later on out = [] for i in range(33): out.append(get_spendable_output()) # Start by building a couple of blocks on top (which output is spent is # in parentheses): # genesis -> b1 (0) -> b2 (1) block(1, spend=out[0]) save_spendable_output() yield accepted() block(2, spend=out[1]) yield accepted() save_spendable_output() # so fork like this: # # genesis -> b1 (0) -> b2 (1) # \-> b3 (1) # # Nothing should happen at this point. We saw b2 first so it takes priority. tip(1) b3 = block(3, spend=out[1]) txout_b3 = PreviousSpendableOutput(b3.vtx[1], 0) yield rejected() # Now we add another block to make the alternative chain longer. # # genesis -> b1 (0) -> b2 (1) # \-> b3 (1) -> b4 (2) block(4, spend=out[2]) yield accepted() # ... and back to the first chain. 
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b3 (1) -> b4 (2) tip(2) block(5, spend=out[2]) save_spendable_output() yield rejected() block(6, spend=out[3]) yield accepted() # Try to create a fork that double-spends # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b7 (2) -> b8 (4) # \-> b3 (1) -> b4 (2) tip(5) block(7, spend=out[2]) yield rejected() block(8, spend=out[4]) yield rejected() # Try to create a block that has too much fee # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b9 (4) # \-> b3 (1) -> b4 (2) tip(6) block(9, spend=out[4], additional_coinbase_value=1) yield rejected(RejectResult(16, b'bad-cb-amount')) # Create a fork that ends in a block with too much fee (the one that causes the reorg) # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b10 (3) -> b11 (4) # \-> b3 (1) -> b4 (2) tip(5) block(10, spend=out[3]) yield rejected() block(11, spend=out[4], additional_coinbase_value=1) yield rejected(RejectResult(16, b'bad-cb-amount')) # Try again, but with a valid fork first # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b14 (5) # (b12 added last) # \-> b3 (1) -> b4 (2) tip(5) b12 = block(12, spend=out[3]) save_spendable_output() b13 = block(13, spend=out[4]) # Deliver the block header for b12, and the block b13. # b13 should be accepted but the tip won't advance until b12 is delivered. yield TestInstance([[CBlockHeader(b12), None], [b13, False]]) save_spendable_output() # b14 is invalid, but the node won't know that until it tries to connect # Tip still can't advance because b12 is missing block(14, spend=out[5], additional_coinbase_value=1) yield rejected() yield TestInstance([[b12, True, b13.sha256]]) # New tip should be b13. # Add a block with MAX_BLOCK_SIGOPS and one with one more sigop # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) -> b16 (6) # \-> b3 (1) -> b4 (2) # Test that a block with a lot of checksigs is okay lots_of_checksigs = CScript([OP_CHECKSIG] * (MAX_BLOCK_SIGOPS - 1)) tip(13) block(15, spend=out[5], script=lots_of_checksigs) yield accepted() save_spendable_output() # Test that a block with too many checksigs is rejected too_many_checksigs = CScript([OP_CHECKSIG] * (MAX_BLOCK_SIGOPS)) block(16, spend=out[6], script=too_many_checksigs) yield rejected(RejectResult(16, b'bad-blk-sigops')) # Attempt to spend a transaction created on a different fork # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) -> b17 (b3.vtx[1]) # \-> b3 (1) -> b4 (2) tip(15) block(17, spend=txout_b3) yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) # Attempt to spend a transaction created on a different fork (on a fork this time) # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) # \-> b18 (b3.vtx[1]) -> b19 (6) # \-> b3 (1) -> b4 (2) tip(13) block(18, spend=txout_b3) yield rejected() block(19, spend=out[6]) yield rejected() # Attempt to spend a coinbase at depth too low # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) -> b20 (7) # \-> b3 (1) -> b4 (2) tip(15) block(20, spend=out[7]) yield rejected(RejectResult(16, b'bad-txns-premature-spend-of-coinbase')) # Attempt to spend a coinbase at depth too low (on a fork this time) # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) # \-> b21 (6) -> b22 (5) # \-> b3 (1) -> b4 (2) tip(13) block(21, spend=out[6]) yield rejected() block(22, spend=out[5]) yield rejected() # Create a block on either 
side of MAX_BLOCK_SIZE and make sure its accepted/rejected # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) # \-> b24 (6) -> b25 (7) # \-> b3 (1) -> b4 (2) tip(15) b23 = block(23, spend=out[6]) tx = CTransaction() script_length = MAX_BLOCK_SIZE - len(b23.serialize()) - 69 script_output = CScript([b'\x00' * script_length]) tx.vout.append(CTxOut(0, script_output)) tx.vin.append(CTxIn(COutPoint(b23.vtx[1].sha256, 0))) b23 = update_block(23, [tx]) # Make sure the math above worked out to produce a max-sized block assert_equal(len(b23.serialize()), MAX_BLOCK_SIZE) yield accepted() save_spendable_output() # Make the next block one byte bigger and check that it fails tip(15) b24 = block(24, spend=out[6]) script_length = MAX_BLOCK_SIZE - len(b24.serialize()) - 69 script_output = CScript([b'\x00' * (script_length+1)]) tx.vout = [CTxOut(0, script_output)] b24 = update_block(24, [tx]) assert_equal(len(b24.serialize()), MAX_BLOCK_SIZE+1) yield rejected(RejectResult(16, b'bad-blk-length')) block(25, spend=out[7]) yield rejected() # Create blocks with a coinbase input script size out of range # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7) # \-> ... (6) -> ... (7) # \-> b3 (1) -> b4 (2) tip(15) b26 = block(26, spend=out[6]) b26.vtx[0].vin[0].scriptSig = b'\x00' b26.vtx[0].rehash() # update_block causes the merkle root to get updated, even with no new # transactions, and updates the required state. b26 = update_block(26, []) yield rejected(RejectResult(16, b'bad-cb-length')) # Extend the b26 chain to make sure bitcoind isn't accepting b26 b27 = block(27, spend=out[7]) yield rejected(RejectResult(0, b'bad-prevblk')) # Now try a too-large-coinbase script tip(15) b28 = block(28, spend=out[6]) b28.vtx[0].vin[0].scriptSig = b'\x00' * 101 b28.vtx[0].rehash() b28 = update_block(28, []) yield rejected(RejectResult(16, b'bad-cb-length')) # Extend the b28 chain to make sure bitcoind isn't accepting b28 b29 = block(29, spend=out[7]) yield rejected(RejectResult(0, b'bad-prevblk')) # b30 has a max-sized coinbase scriptSig. tip(23) b30 = block(30) b30.vtx[0].vin[0].scriptSig = b'\x00' * 100 b30.vtx[0].rehash() b30 = update_block(30, []) yield accepted() save_spendable_output() # b31 - b35 - check sigops of OP_CHECKMULTISIG / OP_CHECKMULTISIGVERIFY / OP_CHECKSIGVERIFY # # genesis -> ... -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) # \-> b36 (11) # \-> b34 (10) # \-> b32 (9) # # MULTISIG: each op code counts as 20 sigops. To create the edge case, pack another 19 sigops at the end. 
lots_of_multisigs = CScript([OP_CHECKMULTISIG] * ((MAX_BLOCK_SIGOPS-1) // 20) + [OP_CHECKSIG] * 19) b31 = block(31, spend=out[8], script=lots_of_multisigs) assert_equal(get_legacy_sigopcount_block(b31), MAX_BLOCK_SIGOPS) yield accepted() save_spendable_output() # this goes over the limit because the coinbase has one sigop too_many_multisigs = CScript([OP_CHECKMULTISIG] * (MAX_BLOCK_SIGOPS // 20)) b32 = block(32, spend=out[9], script=too_many_multisigs) assert_equal(get_legacy_sigopcount_block(b32), MAX_BLOCK_SIGOPS + 1) yield rejected(RejectResult(16, b'bad-blk-sigops')) # CHECKMULTISIGVERIFY tip(31) lots_of_multisigs = CScript([OP_CHECKMULTISIGVERIFY] * ((MAX_BLOCK_SIGOPS-1) // 20) + [OP_CHECKSIG] * 19) block(33, spend=out[9], script=lots_of_multisigs) yield accepted() save_spendable_output() too_many_multisigs = CScript([OP_CHECKMULTISIGVERIFY] * (MAX_BLOCK_SIGOPS // 20)) block(34, spend=out[10], script=too_many_multisigs) yield rejected(RejectResult(16, b'bad-blk-sigops')) # CHECKSIGVERIFY tip(33) lots_of_checksigs = CScript([OP_CHECKSIGVERIFY] * (MAX_BLOCK_SIGOPS - 1)) b35 = block(35, spend=out[10], script=lots_of_checksigs) yield accepted() save_spendable_output() too_many_checksigs = CScript([OP_CHECKSIGVERIFY] * (MAX_BLOCK_SIGOPS)) block(36, spend=out[11], script=too_many_checksigs) yield rejected(RejectResult(16, b'bad-blk-sigops')) # Check spending of a transaction in a block which failed to connect # # b6 (3) # b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) # \-> b37 (11) # \-> b38 (11/37) # # save 37's spendable output, but then double-spend out11 to invalidate the block tip(35) b37 = block(37, spend=out[11]) txout_b37 = PreviousSpendableOutput(b37.vtx[1], 0) tx = create_and_sign_tx(out[11].tx, out[11].n, 0) b37 = update_block(37, [tx]) yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) # attempt to spend b37's first non-coinbase tx, at which point b37 was still considered valid tip(35) block(38, spend=txout_b37) yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) # Check P2SH SigOp counting # # # 13 (4) -> b15 (5) -> b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b41 (12) # \-> b40 (12) # # b39 - create some P2SH outputs that will require 6 sigops to spend: # # redeem_script = COINBASE_PUBKEY, (OP_2DUP+OP_CHECKSIGVERIFY) * 5, OP_CHECKSIG # p2sh_script = OP_HASH160, ripemd160(sha256(script)), OP_EQUAL # tip(35) b39 = block(39) b39_outputs = 0 b39_sigops_per_output = 6 # Build the redeem script, hash it, use hash to create the p2sh script redeem_script = CScript([self.coinbase_pubkey] + [OP_2DUP, OP_CHECKSIGVERIFY]*5 + [OP_CHECKSIG]) redeem_script_hash = hash160(redeem_script) p2sh_script = CScript([OP_HASH160, redeem_script_hash, OP_EQUAL]) # Create a transaction that spends one satoshi to the p2sh_script, the rest to OP_TRUE # This must be signed because it is spending a coinbase spend = out[11] tx = create_tx(spend.tx, spend.n, 1, p2sh_script) tx.vout.append(CTxOut(spend.tx.vout[spend.n].nValue - 1, CScript([OP_TRUE]))) self.sign_tx(tx, spend.tx, spend.n) tx.rehash() b39 = update_block(39, [tx]) b39_outputs += 1 # Until block is full, add tx's with 1 satoshi to p2sh_script, the rest to OP_TRUE tx_new = None tx_last = tx total_size=len(b39.serialize()) while(total_size < MAX_BLOCK_SIZE): tx_new = create_tx(tx_last, 1, 1, p2sh_script) tx_new.vout.append(CTxOut(tx_last.vout[1].nValue - 1, CScript([OP_TRUE]))) tx_new.rehash() total_size += len(tx_new.serialize()) if total_size >= 
MAX_BLOCK_SIZE: break b39.vtx.append(tx_new) # add tx to block tx_last = tx_new b39_outputs += 1 b39 = update_block(39, []) yield accepted() save_spendable_output() # Test sigops in P2SH redeem scripts # # b40 creates 3333 tx's spending the 6-sigop P2SH outputs from b39 for a total of 24605 sigops. # The first tx has one sigop and then at the end we add 2 more to put us just over the max. # # b41 does the same, less one, so it has the maximum sigops permitted. # tip(39) b40 = block(40, spend=out[12]) sigops = get_legacy_sigopcount_block(b40) numTxes = (MAX_BLOCK_SIGOPS - sigops) // b39_sigops_per_output assert_equal(numTxes <= b39_outputs, True) lastOutpoint = COutPoint(b40.vtx[1].sha256, 0) new_txs = [] for i in range(1, numTxes+1): tx = CTransaction() tx.vout.append(CTxOut(1, CScript([OP_TRUE]))) tx.vin.append(CTxIn(lastOutpoint, b'')) # second input is corresponding P2SH output from b39 tx.vin.append(CTxIn(COutPoint(b39.vtx[i].sha256, 0), b'')) # Note: must pass the redeem_script (not p2sh_script) to the signature hash function (sighash, err) = SignatureHash(redeem_script, tx, 1, SIGHASH_ALL) sig = self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL])) scriptSig = CScript([sig, redeem_script]) tx.vin[1].scriptSig = scriptSig tx.rehash() new_txs.append(tx) lastOutpoint = COutPoint(tx.sha256, 0) b40_sigops_to_fill = MAX_BLOCK_SIGOPS - (numTxes * b39_sigops_per_output + sigops) + 1 tx = CTransaction() tx.vin.append(CTxIn(lastOutpoint, b'')) tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b40_sigops_to_fill))) tx.rehash() new_txs.append(tx) update_block(40, new_txs) yield rejected(RejectResult(16, b'bad-blk-sigops')) # same as b40, but one less sigop tip(39) b41 = block(41, spend=None) update_block(41, b40.vtx[1:-1]) b41_sigops_to_fill = b40_sigops_to_fill - 1 tx = CTransaction() tx.vin.append(CTxIn(lastOutpoint, b'')) tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b41_sigops_to_fill))) tx.rehash() update_block(41, [tx]) yield accepted() # Fork off of b39 to create a constant base again # # b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) # \-> b41 (12) # tip(39) block(42, spend=out[12]) yield rejected() save_spendable_output() block(43, spend=out[13]) yield accepted() save_spendable_output() # Test a number of really invalid scenarios # # -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b44 (14) # \-> ??? (15) # The next few blocks are going to be created "by hand" since they'll do funky things, such as having # the first transaction be non-coinbase, etc. The purpose of b44 is to make sure this works. 
height = self.block_heights[self.tip.sha256] + 1 coinbase = create_coinbase(height, self.coinbase_pubkey) b44 = CBlock() b44.nTime = self.tip.nTime + 1 b44.hashPrevBlock = self.tip.sha256 b44.nBits = 0x207fffff b44.vtx.append(coinbase) b44.hashMerkleRoot = b44.calc_merkle_root() b44.solve() self.tip = b44 self.block_heights[b44.sha256] = height self.blocks[44] = b44 yield accepted() # A block with a non-coinbase as the first tx non_coinbase = create_tx(out[15].tx, out[15].n, 1) b45 = CBlock() b45.nTime = self.tip.nTime + 1 b45.hashPrevBlock = self.tip.sha256 b45.nBits = 0x207fffff b45.vtx.append(non_coinbase) b45.hashMerkleRoot = b45.calc_merkle_root() b45.calc_sha256() b45.solve() self.block_heights[b45.sha256] = self.block_heights[self.tip.sha256]+1 self.tip = b45 self.blocks[45] = b45 yield rejected(RejectResult(16, b'bad-cb-missing')) # A block with no txns tip(44) b46 = CBlock() b46.nTime = b44.nTime+1 b46.hashPrevBlock = b44.sha256 b46.nBits = 0x207fffff b46.vtx = [] b46.hashMerkleRoot = 0 b46.solve() self.block_heights[b46.sha256] = self.block_heights[b44.sha256]+1 self.tip = b46 assert 46 not in self.blocks self.blocks[46] = b46 s = ser_uint256(b46.hashMerkleRoot) yield rejected(RejectResult(16, b'bad-blk-length')) # A block with invalid work tip(44) b47 = block(47, solve=False) target = uint256_from_compact(b47.nBits) while b47.sha256 < target: #changed > to < b47.nNonce += 1 b47.rehash() yield rejected(RejectResult(16, b'high-hash')) # A block with timestamp > 2 hrs in the future tip(44) b48 = block(48, solve=False) b48.nTime = get_mocktime() + 60 * 60 * 3 b48.solve() yield rejected(RejectResult(16, b'time-too-new')) # A block with an invalid merkle hash tip(44) b49 = block(49) b49.hashMerkleRoot += 1 b49.solve() yield rejected(RejectResult(16, b'bad-txnmrklroot')) # A block with an incorrect POW limit tip(44) b50 = block(50) b50.nBits = b50.nBits - 1 b50.solve() yield rejected(RejectResult(16, b'bad-diffbits')) # A block with two coinbase txns tip(44) b51 = block(51) cb2 = create_coinbase(51, self.coinbase_pubkey) b51 = update_block(51, [cb2]) yield rejected(RejectResult(16, b'bad-cb-multiple')) # A block w/ duplicate txns # Note: txns have to be in the right position in the merkle tree to trigger this error tip(44) b52 = block(52, spend=out[15]) tx = create_tx(b52.vtx[1], 0, 1) b52 = update_block(52, [tx, tx]) yield rejected(RejectResult(16, b'bad-txns-duplicate')) # Test block timestamps # -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) # \-> b54 (15) # tip(43) block(53, spend=out[14]) yield rejected() # rejected since b44 is at same height save_spendable_output() # invalid timestamp (b35 is 5 blocks back, so its time is MedianTimePast) b54 = block(54, spend=out[15]) b54.nTime = b35.nTime - 1 b54.solve() yield rejected(RejectResult(16, b'time-too-old')) # valid timestamp tip(53) b55 = block(55, spend=out[15]) b55.nTime = b35.nTime update_block(55, []) yield accepted() save_spendable_output() # Test CVE-2012-2459 # # -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57p2 (16) # \-> b57 (16) # \-> b56p2 (16) # \-> b56 (16) # # Merkle tree malleability (CVE-2012-2459): repeating sequences of transactions in a block without # affecting the merkle root of a block, while still invalidating it. # See: src/consensus/merkle.h # # b57 has three txns: coinbase, tx, tx1. The merkle root computation will duplicate tx. # Result: OK # # b56 copies b57 but duplicates tx1 and does not recalculate the block hash. 
So it has a valid merkle # root but duplicate transactions. # Result: Fails # # b57p2 has six transactions in its merkle tree: # - coinbase, tx, tx1, tx2, tx3, tx4 # Merkle root calculation will duplicate as necessary. # Result: OK. # # b56p2 copies b57p2 but adds both tx3 and tx4. The purpose of the test is to make sure the code catches # duplicate txns that are not next to one another with the "bad-txns-duplicate" error (which indicates # that the error was caught early, avoiding a DOS vulnerability.) # b57 - a good block with 2 txs, don't submit until end tip(55) b57 = block(57) tx = create_and_sign_tx(out[16].tx, out[16].n, 1) tx1 = create_tx(tx, 0, 1) b57 = update_block(57, [tx, tx1]) # b56 - copy b57, add a duplicate tx tip(55) b56 = copy.deepcopy(b57) self.blocks[56] = b56 assert_equal(len(b56.vtx),3) b56 = update_block(56, [tx1]) assert_equal(b56.hash, b57.hash) yield rejected(RejectResult(16, b'bad-txns-duplicate')) # b57p2 - a good block with 6 tx'es, don't submit until end tip(55) b57p2 = block("57p2") tx = create_and_sign_tx(out[16].tx, out[16].n, 1) tx1 = create_tx(tx, 0, 1) tx2 = create_tx(tx1, 0, 1) tx3 = create_tx(tx2, 0, 1) tx4 = create_tx(tx3, 0, 1) b57p2 = update_block("57p2", [tx, tx1, tx2, tx3, tx4]) # b56p2 - copy b57p2, duplicate two non-consecutive tx's tip(55) b56p2 = copy.deepcopy(b57p2) self.blocks["b56p2"] = b56p2 assert_equal(b56p2.hash, b57p2.hash) assert_equal(len(b56p2.vtx),6) b56p2 = update_block("b56p2", [tx3, tx4]) yield rejected(RejectResult(16, b'bad-txns-duplicate')) tip("57p2") yield accepted() tip(57) yield rejected() #rejected because 57p2 seen first save_spendable_output() # Test a few invalid tx types # # -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) # \-> ??? (17) # # tx with prevout.n out of range tip(57) b58 = block(58, spend=out[17]) tx = CTransaction() assert(len(out[17].tx.vout) < 42) tx.vin.append(CTxIn(COutPoint(out[17].tx.sha256, 42), CScript([OP_TRUE]), 0xffffffff)) tx.vout.append(CTxOut(0, b"")) tx.calc_sha256() b58 = update_block(58, [tx]) yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) # tx with output value > input value out of range tip(57) b59 = block(59) tx = create_and_sign_tx(out[17].tx, out[17].n, 510*COIN) b59 = update_block(59, [tx]) yield rejected(RejectResult(16, b'bad-txns-in-belowout')) # reset to good chain tip(57) b60 = block(60, spend=out[17]) yield accepted() save_spendable_output() # Test BIP30 # # -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) # \-> b61 (18) # # Blocks are not allowed to contain a transaction whose id matches that of an earlier, # not-fully-spent transaction in the same chain. To test, make identical coinbases; # the second one should be rejected. 
# tip(60) b61 = block(61, spend=out[18]) b61.vtx[0].vin[0].scriptSig = b60.vtx[0].vin[0].scriptSig #equalize the coinbases b61.vtx[0].rehash() b61 = update_block(61, []) assert_equal(b60.vtx[0].serialize(), b61.vtx[0].serialize()) yield rejected(RejectResult(16, b'bad-txns-BIP30')) # Test tx.isFinal is properly rejected (not an exhaustive tx.isFinal test, that should be in data-driven transaction tests) # # -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) # \-> b62 (18) # tip(60) b62 = block(62) tx = CTransaction() tx.nLockTime = 0xffffffff #this locktime is non-final assert(out[18].n < len(out[18].tx.vout)) tx.vin.append(CTxIn(COutPoint(out[18].tx.sha256, out[18].n))) # don't set nSequence tx.vout.append(CTxOut(0, CScript([OP_TRUE]))) assert(tx.vin[0].nSequence < 0xffffffff) tx.calc_sha256() b62 = update_block(62, [tx]) yield rejected(RejectResult(16, b'bad-txns-nonfinal')) # Test a non-final coinbase is also rejected # # -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) # \-> b63 (-) # tip(60) b63 = block(63) b63.vtx[0].nLockTime = 0xffffffff b63.vtx[0].vin[0].nSequence = 0xDEADBEEF b63.vtx[0].rehash() b63 = update_block(63, []) yield rejected(RejectResult(16, b'bad-txns-nonfinal')) # This checks that a block with a bloated VARINT between the block_header and the array of tx such that # the block is > MAX_BLOCK_SIZE with the bloated varint, but <= MAX_BLOCK_SIZE without the bloated varint, # does not cause a subsequent, identical block with canonical encoding to be rejected. The test does not # care whether the bloated block is accepted or rejected; it only cares that the second block is accepted. # # What matters is that the receiving node should not reject the bloated block, and then reject the canonical # block on the basis that it's the same as an already-rejected block (which would be a consensus failure.) 
#
#  -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18)
#                                                                                        \
#                                                                                         b64a (18)
#  b64a is a bloated block (non-canonical varint)
#  b64 is a good block (same as b64a but w/ canonical varint)
#
tip(60)
regular_block = block("64a", spend=out[18])

# make it a "broken_block," with non-canonical serialization
b64a = CBrokenBlock(regular_block)
b64a.initialize(regular_block)
self.blocks["64a"] = b64a
self.tip = b64a
tx = CTransaction()

# use canonical serialization to calculate size
script_length = MAX_BLOCK_SIZE - len(b64a.normal_serialize()) - 69
script_output = CScript([b'\x00' * script_length])
tx.vout.append(CTxOut(0, script_output))
tx.vin.append(CTxIn(COutPoint(b64a.vtx[1].sha256, 0)))
b64a = update_block("64a", [tx])
assert_equal(len(b64a.serialize()), MAX_BLOCK_SIZE + 8)
yield TestInstance([[self.tip, None]])

# comptool workaround: to make sure b64 is delivered, manually erase b64a from blockstore
self.test.block_store.erase(b64a.sha256)

tip(60)
b64 = CBlock(b64a)
b64.vtx = copy.deepcopy(b64a.vtx)
assert_equal(b64.hash, b64a.hash)
assert_equal(len(b64.serialize()), MAX_BLOCK_SIZE)
self.blocks[64] = b64
update_block(64, [])
yield accepted()
save_spendable_output()

# Spend an output created in the block itself
#
# -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19)
#
tip(64)
b65 = block(65)
tx1 = create_and_sign_tx(out[19].tx, out[19].n, out[19].tx.vout[0].nValue)
tx2 = create_and_sign_tx(tx1, 0, 0)
update_block(65, [tx1, tx2])
yield accepted()
save_spendable_output()

# Attempt to spend an output created later in the same block
#
# -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19)
#                                                                           \-> b66 (20)
tip(65)
b66 = block(66)
tx1 = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue)
tx2 = create_and_sign_tx(tx1, 0, 1)
update_block(66, [tx2, tx1])
yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))

# Attempt to double-spend a transaction created in a block
#
# -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19)
#                                                                           \-> b67 (20)
#
#
tip(65)
b67 = block(67)
tx1 = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue)
tx2 = create_and_sign_tx(tx1, 0, 1)
tx3 = create_and_sign_tx(tx1, 0, 2)
update_block(67, [tx1, tx2, tx3])
yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))

# More tests of block subsidy
#
# -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20)
#                                                                           \-> b68 (20)
#
# b68 - coinbase with an extra 10 satoshis,
#       creates a tx in which 9 satoshis from out[20] go to fees
#       this fails because the coinbase is trying to claim 1 satoshi too much in fees
#
# b69 - coinbase with extra 10 satoshis, and a tx that gives a 10 satoshi fee
#       this succeeds
#
tip(65)
b68 = block(68, additional_coinbase_value=10)
tx = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue-9)
update_block(68, [tx])
yield rejected(RejectResult(16, b'bad-cb-amount'))

tip(65)
b69 = block(69, additional_coinbase_value=10)
tx = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue-10)
update_block(69, [tx])
yield accepted()
save_spendable_output()

# Test spending the outpoint of a non-existent transaction
#
# -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20)
#                                                                           \-> b70 (21)
#
tip(69)
block(70, spend=out[21])
bogus_tx = CTransaction()
bogus_tx.sha256 = uint256_from_str(b"23c70ed7c0506e9178fc1a987f40a33946d4ad4c962b5ae3a52546da53af0c5c")
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(bogus_tx.sha256, 0), b"", 0xffffffff))
tx.vout.append(CTxOut(1, b""))
update_block(70, [tx])
yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))

# Test accepting an invalid block which has the same hash as a valid one (via merkle tree tricks)
#
# -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20) -> b72 (21)
#                                                                                       \-> b71 (21)
#
# b72 is a good block.
# b71 is a copy of b72, but re-adds one of its transactions. However, it has the same hash as b72.
#
tip(69)
b72 = block(72)
tx1 = create_and_sign_tx(out[21].tx, out[21].n, 2)
tx2 = create_and_sign_tx(tx1, 0, 1)
b72 = update_block(72, [tx1, tx2])  # now tip is 72
b71 = copy.deepcopy(b72)
b71.vtx.append(tx2)  # add duplicate tx2
self.block_heights[b71.sha256] = self.block_heights[b69.sha256] + 1  # b71 builds off b69
self.blocks[71] = b71

assert_equal(len(b71.vtx), 4)
assert_equal(len(b72.vtx), 3)
assert_equal(b72.sha256, b71.sha256)

tip(71)
yield rejected(RejectResult(16, b'bad-txns-duplicate'))
tip(72)
yield accepted()
save_spendable_output()

# Test some invalid scripts and MAX_BLOCK_SIGOPS
#
# -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20) -> b72 (21)
#                                                                           \-> b** (22)
#

# b73 - tx with excessive sigops that are placed after an excessively large script element.
#       The purpose of the test is to make sure those sigops are counted.
#
#       script is a bytearray of size 20,526
#
#       bytearray[0-19,998]     : OP_CHECKSIG
#       bytearray[19,999]       : OP_PUSHDATA4
#       bytearray[20,000-20,003]: 521 (max_script_element_size+1, in little-endian format)
#       bytearray[20,004-20,524]: unread data (script_element)
#       bytearray[20,525]       : OP_CHECKSIG (this puts us over the limit)
#
tip(72)
b73 = block(73)
size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 1 + 5 + 1
a = bytearray([OP_CHECKSIG] * size)
a[MAX_BLOCK_SIGOPS - 1] = int("4e", 16)  # OP_PUSHDATA4

element_size = MAX_SCRIPT_ELEMENT_SIZE + 1
a[MAX_BLOCK_SIGOPS] = element_size % 256
a[MAX_BLOCK_SIGOPS+1] = element_size // 256
a[MAX_BLOCK_SIGOPS+2] = 0
a[MAX_BLOCK_SIGOPS+3] = 0

tx = create_and_sign_tx(out[22].tx, 0, 1, CScript(a))
b73 = update_block(73, [tx])
assert_equal(get_legacy_sigopcount_block(b73), MAX_BLOCK_SIGOPS+1)
yield rejected(RejectResult(16, b'bad-blk-sigops'))

# b74/75 - if we push an invalid script element, all previous sigops are counted,
#          but sigops after the element are not counted.
#
#          The invalid script element is one whose push_data indicates that
#          there will be a large amount of data (0xffffff bytes), but we only
#          provide a much smaller number. These bytes are CHECKSIGS so they would
#          cause b75 to fail for excessive sigops, if those bytes were counted.
#
# b74 fails because we put MAX_BLOCK_SIGOPS+1 before the element
# b75 succeeds because we put MAX_BLOCK_SIGOPS before the element
#
#
tip(72)
b74 = block(74)
size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 42  # total = 20,561
a = bytearray([OP_CHECKSIG] * size)
a[MAX_BLOCK_SIGOPS] = 0x4e
a[MAX_BLOCK_SIGOPS+1] = 0xfe
a[MAX_BLOCK_SIGOPS+2] = 0xff
a[MAX_BLOCK_SIGOPS+3] = 0xff
a[MAX_BLOCK_SIGOPS+4] = 0xff
tx = create_and_sign_tx(out[22].tx, 0, 1, CScript(a))
b74 = update_block(74, [tx])
yield rejected(RejectResult(16, b'bad-blk-sigops'))

tip(72)
b75 = block(75)
size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 42
a = bytearray([OP_CHECKSIG] * size)
a[MAX_BLOCK_SIGOPS-1] = 0x4e
a[MAX_BLOCK_SIGOPS] = 0xff
a[MAX_BLOCK_SIGOPS+1] = 0xff
a[MAX_BLOCK_SIGOPS+2] = 0xff
a[MAX_BLOCK_SIGOPS+3] = 0xff
tx = create_and_sign_tx(out[22].tx, 0, 1, CScript(a))
b75 = update_block(75, [tx])
yield accepted()
save_spendable_output()

# Check that if we push an element filled with CHECKSIGs, they are not counted
tip(75)
b76 = block(76)
size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 1 + 5
a = bytearray([OP_CHECKSIG] * size)
a[MAX_BLOCK_SIGOPS-1] = 0x4e  # PUSHDATA4, but leave the following bytes as just checksigs
tx = create_and_sign_tx(out[23].tx, 0, 1, CScript(a))
b76 = update_block(76, [tx])
yield accepted()
save_spendable_output()

# Test transaction resurrection
#
# -> b77 (24) -> b78 (25) -> b79 (26)
#             \-> b80 (25) -> b81 (26) -> b82 (27)
#
# b78 creates a tx, which is spent in b79. After b82, both should be in mempool
#
# The txs must be unsigned and pass the node's mempool policy. They are unsigned for the
# rather obscure reason that the Python signature code does not distinguish between
# Low-S and High-S values (whereas the bitcoin code has custom code which does so);
# as a result, the odds are 50% that the python code will use the right
# value and the transaction will be accepted into the mempool. Until we modify the
# test framework to support low-S signing, we are out of luck.
#
# To get around this issue, we construct transactions which are not signed and which
# spend to OP_TRUE. If the standard-ness rules change, this test would need to be
# updated. (Perhaps to spend to a P2SH OP_TRUE script)
#
tip(76)
block(77)
tx77 = create_and_sign_tx(out[24].tx, out[24].n, 10*COIN)
update_block(77, [tx77])
yield accepted()
save_spendable_output()

block(78)
tx78 = create_tx(tx77, 0, 9*COIN)
update_block(78, [tx78])
yield accepted()

block(79)
tx79 = create_tx(tx78, 0, 8*COIN)
update_block(79, [tx79])
yield accepted()

# mempool should be empty
assert_equal(len(self.nodes[0].getrawmempool()), 0)

tip(77)
block(80, spend=out[25])
yield rejected()
save_spendable_output()

block(81, spend=out[26])
yield rejected()  # other chain is same length
save_spendable_output()

block(82, spend=out[27])
yield accepted()  # now this chain is longer, triggers re-org
save_spendable_output()

# now check that tx78 and tx79 have been put back into the peer's mempool
mempool = self.nodes[0].getrawmempool()
assert_equal(len(mempool), 2)
assert(tx78.hash in mempool)
assert(tx79.hash in mempool)

# Test invalid opcodes in dead execution paths.
#
# -> b81 (26) -> b82 (27) -> b83 (28)
#
b83 = block(83)
op_codes = [OP_IF, OP_INVALIDOPCODE, OP_ELSE, OP_TRUE, OP_ENDIF]
script = CScript(op_codes)
tx1 = create_and_sign_tx(out[28].tx, out[28].n, out[28].tx.vout[0].nValue, script)

tx2 = create_and_sign_tx(tx1, 0, 0, CScript([OP_TRUE]))
tx2.vin[0].scriptSig = CScript([OP_FALSE])
tx2.rehash()

update_block(83, [tx1, tx2])
yield accepted()
save_spendable_output()

# Reorg on/off blocks that have OP_RETURN in them (and try to spend them)
#
# -> b81 (26) -> b82 (27) -> b83 (28) -> b84 (29) -> b87 (30) -> b88 (31)
#                                      \-> b85 (29) -> b86 (30)           \-> b89a (32)
#
#
b84 = block(84)
tx1 = create_tx(out[29].tx, out[29].n, 0, CScript([OP_RETURN]))
tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
tx1.calc_sha256()
self.sign_tx(tx1, out[29].tx, out[29].n)
tx1.rehash()
tx2 = create_tx(tx1, 1, 0, CScript([OP_RETURN]))
tx2.vout.append(CTxOut(0, CScript([OP_RETURN])))
tx3 = create_tx(tx1, 2, 0, CScript([OP_RETURN]))
tx3.vout.append(CTxOut(0, CScript([OP_TRUE])))
tx4 = create_tx(tx1, 3, 0, CScript([OP_TRUE]))
tx4.vout.append(CTxOut(0, CScript([OP_RETURN])))
tx5 = create_tx(tx1, 4, 0, CScript([OP_RETURN]))

update_block(84, [tx1,tx2,tx3,tx4,tx5])
yield accepted()
save_spendable_output()

tip(83)
block(85, spend=out[29])
yield rejected()

block(86, spend=out[30])
yield accepted()

tip(84)
block(87, spend=out[30])
yield rejected()
save_spendable_output()

block(88, spend=out[31])
yield accepted()
save_spendable_output()

# trying to spend the OP_RETURN output is rejected
block("89a", spend=out[32])
tx = create_tx(tx1, 0, 0, CScript([OP_TRUE]))
update_block("89a", [tx])
yield rejected()

# Test re-org of about a week's worth of blocks (1088 blocks)
# This test takes a minute or two and can be accomplished in memory
#
if self.options.runbarelyexpensive:
    tip(88)
    LARGE_REORG_SIZE = 1088
    test1 = TestInstance(sync_every_block=False)
    spend = out[32]
    for i in range(89, LARGE_REORG_SIZE + 89):
        b = block(i, spend)
        tx = CTransaction()
        script_length = MAX_BLOCK_SIZE - len(b.serialize()) - 69
        script_output = CScript([b'\x00' * script_length])
        tx.vout.append(CTxOut(0, script_output))
        tx.vin.append(CTxIn(COutPoint(b.vtx[1].sha256, 0)))
        b = update_block(i, [tx])
        assert_equal(len(b.serialize()), MAX_BLOCK_SIZE)
        test1.blocks_and_transactions.append([self.tip, True])
        save_spendable_output()
        spend = get_spendable_output()

    yield test1
    chain1_tip = i

    # now create alt chain of same length
    tip(88)
    test2 = TestInstance(sync_every_block=False)
    for i in range(89, LARGE_REORG_SIZE + 89):
        block("alt"+str(i))
        test2.blocks_and_transactions.append([self.tip, False])
    yield test2

    # extend alt chain to trigger re-org
    block("alt" + str(chain1_tip + 1))
    yield accepted()

    # ... and re-org back to the first chain
    tip(chain1_tip)
    block(chain1_tip + 1)
    yield rejected()
    block(chain1_tip + 2)
    yield accepted()

    chain1_tip += 2

if __name__ == '__main__':
    FullBlockTest().main()
class CBrokenBlock(CBlock):
    def __init__(self, header=None):
        super(CBrokenBlock, self).__init__(header)

    def initialize(self, base_block):
        self.vtx = copy.deepcopy(base_block.vtx)
        self.hashMerkleRoot = self.calc_merkle_root()

    def serialize(self):
        r = b""
        # serialize the block header only (skip CBlock's canonical tx serialization)
        r += super(CBlock, self).serialize()
        # encode the tx count as a bloated, non-canonical 9-byte varint
        # (0xff marker followed by the count as a uint64)
        r += struct.pack("<BQ", 255, len(self.vtx))
        for tx in self.vtx:
            r += tx.serialize()
        return r

    def normal_serialize(self):
        # canonical CBlock serialization, used above to size the filler tx
        r = b""
        r += super(CBrokenBlock, self).serialize()
        return r
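# Editor's note: a minimal, self-contained sketch (not part of the original
# test) of why CBrokenBlock.serialize is "non-canonical". Bitcoin's CompactSize
# varint must use the shortest encoding for a value; CBrokenBlock always emits
# the 9-byte 0xff form for the tx count, bloating the block by exactly 8 bytes,
# which is the "+ 8" in the assert on b64a's serialized size.
import struct

def ser_compact_size_sketch(n):
    # canonical CompactSize: 1, 3, 5, or 9 bytes depending on the value
    if n < 253:
        return struct.pack("B", n)
    elif n <= 0xffff:
        return struct.pack("<BH", 253, n)
    elif n <= 0xffffffff:
        return struct.pack("<BI", 254, n)
    return struct.pack("<BQ", 255, n)

# a 2-tx block: canonical count is 1 byte, the bloated form is 9 bytes
assert len(ser_compact_size_sketch(2)) == 1
assert len(struct.pack("<BQ", 255, 2)) == 9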
test_identity_scale_clean_pass.py
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import numpy as np from auto_scan_test import PassAutoScanTest from program_config import TensorConfig, ProgramConfig, OpConfig import paddle.inference as paddle_infer import unittest import hypothesis.strategies as st class TestIdentityScaleCleanPass(PassAutoScanTest): def sample_predictor_configs(self, program_config): config = self.create_trt_inference_config() config.enable_tensorrt_engine( max_batch_size=8, workspace_size=0, min_subgraph_size=0, precision_mode=paddle_infer.PrecisionType.Float32, use_static=False, use_calib_mode=False) yield config, ['relu'], (1e-5, 1e-5) def sample_program_config(self, draw):
def test(self): self.run_and_statis(max_examples=25, passes=["identity_scale_op_clean_pass"]) if __name__ == "__main__": unittest.main()
bias_after_scale = draw(st.booleans()) n = draw(st.integers(min_value=1, max_value=4)) c = draw(st.integers(min_value=1, max_value=20)) h = draw(st.integers(min_value=1, max_value=20)) w = draw(st.integers(min_value=1, max_value=20)) relu_op = OpConfig("relu", inputs={"X": ["relu_x"]}, outputs={"Out": ["relu_out"]}) scale_op = OpConfig("scale", inputs={"X": ["relu_out"]}, outputs={"Out": ["scale_out"]}, bias=0., scale=1., bias_after_scale=True) program_config = ProgramConfig( ops=[relu_op, scale_op], weights={}, inputs={"relu_x": TensorConfig(shape=[n, c, h, w])}, outputs=["scale_out"]) return program_config
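# Editor's note: an illustrative check (not part of the original test) of why
# identity_scale_op_clean_pass applies to the program above. With scale=1.0 and
# bias=0.0 the scale op computes 1.0 * x + 0.0 == x, an identity, so the pass
# can drop it and leave only the `relu` op that sample_predictor_configs
# expects in the optimized graph.
_x = np.ones([1, 3, 4, 4], dtype="float32")
assert np.array_equal(1.0 * _x + 0.0, _x)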
collection.go
// Copyright (C) MongoDB, Inc. 2017-present. // // Licensed under the Apache License, Version 2.0 (the "License"); you may // not use this file except in compliance with the License. You may obtain // a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 package mongo import ( "context" "errors" "fmt" "strings" "time" "go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/bson/bsoncodec" "go.mongodb.org/mongo-driver/bson/bsontype" "go.mongodb.org/mongo-driver/mongo/description" "go.mongodb.org/mongo-driver/mongo/options" "go.mongodb.org/mongo-driver/mongo/readconcern" "go.mongodb.org/mongo-driver/mongo/readpref" "go.mongodb.org/mongo-driver/mongo/writeconcern" "go.mongodb.org/mongo-driver/x/bsonx/bsoncore" "go.mongodb.org/mongo-driver/x/mongo/driver" "go.mongodb.org/mongo-driver/x/mongo/driver/operation" "go.mongodb.org/mongo-driver/x/mongo/driver/session" ) // Collection is a handle to a MongoDB collection. It is safe for concurrent use by multiple goroutines. type Collection struct { client *Client db *Database name string readConcern *readconcern.ReadConcern writeConcern *writeconcern.WriteConcern readPreference *readpref.ReadPref readSelector description.ServerSelector writeSelector description.ServerSelector registry *bsoncodec.Registry } // aggregateParams is used to store information to configure an Aggregate operation. type aggregateParams struct { ctx context.Context pipeline interface{} client *Client registry *bsoncodec.Registry readConcern *readconcern.ReadConcern writeConcern *writeconcern.WriteConcern retryRead bool db string col string readSelector description.ServerSelector writeSelector description.ServerSelector readPreference *readpref.ReadPref opts []*options.AggregateOptions } func closeImplicitSession(sess *session.Client) { if sess != nil && sess.SessionType == session.Implicit { sess.EndSession() } } func newCollection(db *Database, name string, opts ...*options.CollectionOptions) *Collection { collOpt := options.MergeCollectionOptions(opts...) rc := db.readConcern if collOpt.ReadConcern != nil { rc = collOpt.ReadConcern } wc := db.writeConcern if collOpt.WriteConcern != nil { wc = collOpt.WriteConcern } rp := db.readPreference if collOpt.ReadPreference != nil { rp = collOpt.ReadPreference } reg := db.registry if collOpt.Registry != nil { reg = collOpt.Registry } readSelector := description.CompositeSelector([]description.ServerSelector{ description.ReadPrefSelector(rp), description.LatencySelector(db.client.localThreshold), }) writeSelector := description.CompositeSelector([]description.ServerSelector{ description.WriteSelector(), description.LatencySelector(db.client.localThreshold), }) coll := &Collection{ client: db.client, db: db, name: name, readPreference: rp, readConcern: rc, writeConcern: wc, readSelector: readSelector, writeSelector: writeSelector, registry: reg, } return coll } func (coll *Collection) copy() *Collection { return &Collection{ client: coll.client, db: coll.db, name: coll.name, readConcern: coll.readConcern, writeConcern: coll.writeConcern, readPreference: coll.readPreference, readSelector: coll.readSelector, writeSelector: coll.writeSelector, registry: coll.registry, } } // Clone creates a copy of the Collection configured with the given CollectionOptions. // The specified options are merged with the existing options on the collection, with the specified options taking // precedence. 
func (coll *Collection) Clone(opts ...*options.CollectionOptions) (*Collection, error) { copyColl := coll.copy() optsColl := options.MergeCollectionOptions(opts...) if optsColl.ReadConcern != nil { copyColl.readConcern = optsColl.ReadConcern } if optsColl.WriteConcern != nil { copyColl.writeConcern = optsColl.WriteConcern } if optsColl.ReadPreference != nil { copyColl.readPreference = optsColl.ReadPreference } if optsColl.Registry != nil { copyColl.registry = optsColl.Registry } copyColl.readSelector = description.CompositeSelector([]description.ServerSelector{ description.ReadPrefSelector(copyColl.readPreference), description.LatencySelector(copyColl.client.localThreshold), }) return copyColl, nil } // Name returns the name of the collection. func (coll *Collection) Name() string { return coll.name } // Database returns the Database that was used to create the Collection. func (coll *Collection) Database() *Database { return coll.db } // BulkWrite performs a bulk write operation (https://docs.mongodb.com/manual/core/bulk-write-operations/). // // The models parameter must be a slice of operations to be executed in this bulk write. It cannot be nil or empty. // All of the models must be non-nil. See the mongo.WriteModel documentation for a list of valid model types and // examples of how they should be used. // // The opts parameter can be used to specify options for the operation (see the options.BulkWriteOptions documentation.) func (coll *Collection) BulkWrite(ctx context.Context, models []WriteModel, opts ...*options.BulkWriteOptions) (*BulkWriteResult, error) { if len(models) == 0 { return nil, ErrEmptySlice } if ctx == nil { ctx = context.Background() } sess := sessionFromContext(ctx) if sess == nil && coll.client.sessionPool != nil { var err error sess, err = session.NewClientSession(coll.client.sessionPool, coll.client.id, session.Implicit) if err != nil { return nil, err } defer sess.EndSession() } err := coll.client.validSession(sess) if err != nil { return nil, err } wc := coll.writeConcern if sess.TransactionRunning() { wc = nil } if !writeconcern.AckWrite(wc) { sess = nil } selector := makePinnedSelector(sess, coll.writeSelector) for _, model := range models { if model == nil { return nil, ErrNilDocument } } bwo := options.MergeBulkWriteOptions(opts...) op := bulkWrite{ ordered: bwo.Ordered, bypassDocumentValidation: bwo.BypassDocumentValidation, models: models, session: sess, collection: coll, selector: selector, writeConcern: wc, } err = op.execute(ctx) return &op.result, replaceErrors(err) } func (coll *Collection) insert(ctx context.Context, documents []interface{}, opts ...*options.InsertManyOptions) ([]interface{}, error) { if ctx == nil { ctx = context.Background() } result := make([]interface{}, len(documents)) docs := make([]bsoncore.Document, len(documents)) for i, doc := range documents { var err error docs[i], result[i], err = transformAndEnsureID(coll.registry, doc) if err != nil { return nil, err } } sess := sessionFromContext(ctx) if sess == nil && coll.client.sessionPool != nil { var err error sess, err = session.NewClientSession(coll.client.sessionPool, coll.client.id, session.Implicit) if err != nil { return nil, err } defer sess.EndSession() } err := coll.client.validSession(sess) if err != nil { return nil, err } wc := coll.writeConcern if sess.TransactionRunning() { wc = nil } if !writeconcern.AckWrite(wc) { sess = nil } selector := makePinnedSelector(sess, coll.writeSelector) op := operation.NewInsert(docs...). 
Session(sess).WriteConcern(wc).CommandMonitor(coll.client.monitor). ServerSelector(selector).ClusterClock(coll.client.clock). Database(coll.db.name).Collection(coll.name). Deployment(coll.client.deployment).Crypt(coll.client.cryptFLE).Ordered(true). ServerAPI(coll.client.serverAPI) imo := options.MergeInsertManyOptions(opts...) if imo.BypassDocumentValidation != nil && *imo.BypassDocumentValidation { op = op.BypassDocumentValidation(*imo.BypassDocumentValidation) } if imo.Ordered != nil { op = op.Ordered(*imo.Ordered) } retry := driver.RetryNone if coll.client.retryWrites { retry = driver.RetryOncePerCommand } op = op.Retry(retry) err = op.Execute(ctx) wce, ok := err.(driver.WriteCommandError) if !ok { return result, err } // remove the ids that had writeErrors from result for i, we := range wce.WriteErrors { // i indexes have been removed before the current error, so the index is we.Index-i idIndex := int(we.Index) - i // if the insert is ordered, nothing after the error was inserted if imo.Ordered == nil || *imo.Ordered { result = result[:idIndex] break } result = append(result[:idIndex], result[idIndex+1:]...) } return result, err } // InsertOne executes an insert command to insert a single document into the collection. // // The document parameter must be the document to be inserted. It cannot be nil. If the document does not have an _id // field when transformed into BSON, one will be added automatically to the marshalled document. The original document // will not be modified. The _id can be retrieved from the InsertedID field of the returned InsertOneResult. // // The opts parameter can be used to specify options for the operation (see the options.InsertOneOptions documentation.) // // For more information about the command, see https://docs.mongodb.com/manual/reference/command/insert/. func (coll *Collection) InsertOne(ctx context.Context, document interface{}, opts ...*options.InsertOneOptions) (*InsertOneResult, error) { ioOpts := options.MergeInsertOneOptions(opts...) imOpts := options.InsertMany() if ioOpts.BypassDocumentValidation != nil && *ioOpts.BypassDocumentValidation { imOpts.SetBypassDocumentValidation(*ioOpts.BypassDocumentValidation) } res, err := coll.insert(ctx, []interface{}{document}, imOpts) rr, err := processWriteError(err) if rr&rrOne == 0 { return nil, err } return &InsertOneResult{InsertedID: res[0]}, err } // InsertMany executes an insert command to insert multiple documents into the collection. If write errors occur // during the operation (e.g. duplicate key error), this method returns a BulkWriteException error. // // The documents parameter must be a slice of documents to insert. The slice cannot be nil or empty. The elements must // all be non-nil. For any document that does not have an _id field when transformed into BSON, one will be added // automatically to the marshalled document. The original document will not be modified. The _id values for the inserted // documents can be retrieved from the InsertedIDs field of the returned InsertManyResult. // // The opts parameter can be used to specify options for the operation (see the options.InsertManyOptions documentation.) // // For more information about the command, see https://docs.mongodb.com/manual/reference/command/insert/. func (coll *Collection) InsertMany(ctx context.Context, documents []interface{}, opts ...*options.InsertManyOptions) (*InsertManyResult, error) { if len(documents) == 0 { return nil, ErrEmptySlice } result, err := coll.insert(ctx, documents, opts...) 
	rr, err := processWriteError(err)
	if rr&rrMany == 0 {
		return nil, err
	}

	imResult := &InsertManyResult{InsertedIDs: result}
	writeException, ok := err.(WriteException)
	if !ok {
		return imResult, err
	}

	// create and return a BulkWriteException
	bwErrors := make([]BulkWriteError, 0, len(writeException.WriteErrors))
	for _, we := range writeException.WriteErrors {
		bwErrors = append(bwErrors, BulkWriteError{
			WriteError: we,
			// closing brace reconstructed from context; any remaining
			// BulkWriteError fields are left at their zero values
		})
} return imResult, BulkWriteException{ WriteErrors: bwErrors, WriteConcernError: writeException.WriteConcernError, Labels: writeException.Labels, } } func (coll *Collection) delete(ctx context.Context, filter interface{}, deleteOne bool, expectedRr returnResult, opts ...*options.DeleteOptions) (*DeleteResult, error) { if ctx == nil { ctx = context.Background() } f, err := transformBsoncoreDocument(coll.registry, filter, true, "filter") if err != nil { return nil, err } sess := sessionFromContext(ctx) if sess == nil && coll.client.sessionPool != nil { sess, err = session.NewClientSession(coll.client.sessionPool, coll.client.id, session.Implicit) if err != nil { return nil, err } defer sess.EndSession() } err = coll.client.validSession(sess) if err != nil { return nil, err } wc := coll.writeConcern if sess.TransactionRunning() { wc = nil } if !writeconcern.AckWrite(wc) { sess = nil } selector := makePinnedSelector(sess, coll.writeSelector) var limit int32 if deleteOne { limit = 1 } do := options.MergeDeleteOptions(opts...) didx, doc := bsoncore.AppendDocumentStart(nil) doc = bsoncore.AppendDocumentElement(doc, "q", f) doc = bsoncore.AppendInt32Element(doc, "limit", limit) if do.Collation != nil { doc = bsoncore.AppendDocumentElement(doc, "collation", do.Collation.ToDocument()) } if do.Hint != nil { hint, err := transformValue(coll.registry, do.Hint, false, "hint") if err != nil { return nil, err } doc = bsoncore.AppendValueElement(doc, "hint", hint) } doc, _ = bsoncore.AppendDocumentEnd(doc, didx) op := operation.NewDelete(doc). Session(sess).WriteConcern(wc).CommandMonitor(coll.client.monitor). ServerSelector(selector).ClusterClock(coll.client.clock). Database(coll.db.name).Collection(coll.name). Deployment(coll.client.deployment).Crypt(coll.client.cryptFLE).Ordered(true). ServerAPI(coll.client.serverAPI) if do.Hint != nil { op = op.Hint(true) } if do.Let != nil { let, err := transformBsoncoreDocument(coll.registry, do.Let, true, "let") if err != nil { return nil, err } op = op.Let(let) } // deleteMany cannot be retried retryMode := driver.RetryNone if deleteOne && coll.client.retryWrites { retryMode = driver.RetryOncePerCommand } op = op.Retry(retryMode) rr, err := processWriteError(op.Execute(ctx)) if rr&expectedRr == 0 { return nil, err } return &DeleteResult{DeletedCount: int64(op.Result().N)}, err } // DeleteOne executes a delete command to delete at most one document from the collection. // // The filter parameter must be a document containing query operators and can be used to select the document to be // deleted. It cannot be nil. If the filter does not match any documents, the operation will succeed and a DeleteResult // with a DeletedCount of 0 will be returned. If the filter matches multiple documents, one will be selected from the // matched set. // // The opts parameter can be used to specify options for the operation (see the options.DeleteOptions documentation). // // For more information about the command, see https://docs.mongodb.com/manual/reference/command/delete/. func (coll *Collection) DeleteOne(ctx context.Context, filter interface{}, opts ...*options.DeleteOptions) (*DeleteResult, error) { return coll.delete(ctx, filter, true, rrOne, opts...) } // DeleteMany executes a delete command to delete documents from the collection. // // The filter parameter must be a document containing query operators and can be used to select the documents to // be deleted. It cannot be nil. An empty document (e.g. bson.D{}) should be used to delete all documents in the // collection. 
// If the filter does not match any documents, the operation will succeed and a DeleteResult with a
// DeletedCount of 0 will be returned.
//
// The opts parameter can be used to specify options for the operation (see the options.DeleteOptions documentation).
//
// For more information about the command, see https://docs.mongodb.com/manual/reference/command/delete/.
func (coll *Collection) DeleteMany(ctx context.Context, filter interface{},
	opts ...*options.DeleteOptions) (*DeleteResult, error) {

	return coll.delete(ctx, filter, false, rrMany, opts...)
}

func (coll *Collection) updateOrReplace(ctx context.Context, filter bsoncore.Document, update interface{}, multi bool,
	expectedRr returnResult, checkDollarKey bool, opts ...*options.UpdateOptions) (*UpdateResult, error) {

	if ctx == nil {
		ctx = context.Background()
	}

	uo := options.MergeUpdateOptions(opts...)

	// collation, arrayFilters, upsert, and hint are included on the individual update documents rather than as part of the
	// command
	updateDoc, err := createUpdateDoc(filter, update, uo.Hint, uo.ArrayFilters, uo.Collation, uo.Upsert, multi,
		checkDollarKey, coll.registry)
	if err != nil {
		return nil, err
	}

	sess := sessionFromContext(ctx)
	if sess == nil && coll.client.sessionPool != nil {
		var err error
		sess, err = session.NewClientSession(coll.client.sessionPool, coll.client.id, session.Implicit)
		if err != nil {
			return nil, err
		}
		defer sess.EndSession()
	}

	err = coll.client.validSession(sess)
	if err != nil {
		return nil, err
	}

	wc := coll.writeConcern
	if sess.TransactionRunning() {
		wc = nil
	}
	if !writeconcern.AckWrite(wc) {
		sess = nil
	}

	selector := makePinnedSelector(sess, coll.writeSelector)

	op := operation.NewUpdate(updateDoc).
		Session(sess).WriteConcern(wc).CommandMonitor(coll.client.monitor).
		ServerSelector(selector).ClusterClock(coll.client.clock).
		Database(coll.db.name).Collection(coll.name).
		Deployment(coll.client.deployment).Crypt(coll.client.cryptFLE).Hint(uo.Hint != nil).
		ArrayFilters(uo.ArrayFilters != nil).Ordered(true).ServerAPI(coll.client.serverAPI)
	if uo.Let != nil {
		let, err := transformBsoncoreDocument(coll.registry, uo.Let, true, "let")
		if err != nil {
			return nil, err
		}
		op = op.Let(let)
	}

	if uo.BypassDocumentValidation != nil && *uo.BypassDocumentValidation {
		op = op.BypassDocumentValidation(*uo.BypassDocumentValidation)
	}
	retry := driver.RetryNone
	// retryable writes are only enabled for updateOne/replaceOne operations
	if !multi && coll.client.retryWrites {
		retry = driver.RetryOncePerCommand
	}
	op = op.Retry(retry)
	err = op.Execute(ctx)

	rr, err := processWriteError(err)
	if rr&expectedRr == 0 {
		return nil, err
	}

	opRes := op.Result()
	res := &UpdateResult{
		MatchedCount:  int64(opRes.N),
		ModifiedCount: int64(opRes.NModified),
		UpsertedCount: int64(len(opRes.Upserted)),
	}
	if len(opRes.Upserted) > 0 {
		res.UpsertedID = opRes.Upserted[0].ID
		res.MatchedCount--
	}

	return res, err
}

// UpdateByID executes an update command to update the document whose _id value matches the provided ID in the collection.
// This is equivalent to running UpdateOne(ctx, bson.D{{"_id", id}}, update, opts...).
//
// The id parameter is the _id of the document to be updated. It cannot be nil. If the ID does not match any documents,
// the operation will succeed and an UpdateResult with a MatchedCount of 0 will be returned.
//
// The update parameter must be a document containing update operators
// (https://docs.mongodb.com/manual/reference/operator/update/) and can be used to specify the modifications to be
// made to the selected document. It cannot be nil or empty.
// // The opts parameter can be used to specify options for the operation (see the options.UpdateOptions documentation). // // For more information about the command, see https://docs.mongodb.com/manual/reference/command/update/. func (coll *Collection) UpdateByID(ctx context.Context, id interface{}, update interface{}, opts ...*options.UpdateOptions) (*UpdateResult, error) { if id == nil { return nil, ErrNilValue } return coll.UpdateOne(ctx, bson.D{{"_id", id}}, update, opts...) } // UpdateOne executes an update command to update at most one document in the collection. // // The filter parameter must be a document containing query operators and can be used to select the document to be // updated. It cannot be nil. If the filter does not match any documents, the operation will succeed and an UpdateResult // with a MatchedCount of 0 will be returned. If the filter matches multiple documents, one will be selected from the // matched set and MatchedCount will equal 1. // // The update parameter must be a document containing update operators // (https://docs.mongodb.com/manual/reference/operator/update/) and can be used to specify the modifications to be // made to the selected document. It cannot be nil or empty. // // The opts parameter can be used to specify options for the operation (see the options.UpdateOptions documentation). // // For more information about the command, see https://docs.mongodb.com/manual/reference/command/update/. func (coll *Collection) UpdateOne(ctx context.Context, filter interface{}, update interface{}, opts ...*options.UpdateOptions) (*UpdateResult, error) { if ctx == nil { ctx = context.Background() } f, err := transformBsoncoreDocument(coll.registry, filter, true, "filter") if err != nil { return nil, err } return coll.updateOrReplace(ctx, f, update, false, rrOne, true, opts...) } // UpdateMany executes an update command to update documents in the collection. // // The filter parameter must be a document containing query operators and can be used to select the documents to be // updated. It cannot be nil. If the filter does not match any documents, the operation will succeed and an UpdateResult // with a MatchedCount of 0 will be returned. // // The update parameter must be a document containing update operators // (https://docs.mongodb.com/manual/reference/operator/update/) and can be used to specify the modifications to be made // to the selected documents. It cannot be nil or empty. // // The opts parameter can be used to specify options for the operation (see the options.UpdateOptions documentation). // // For more information about the command, see https://docs.mongodb.com/manual/reference/command/update/. func (coll *Collection) UpdateMany(ctx context.Context, filter interface{}, update interface{}, opts ...*options.UpdateOptions) (*UpdateResult, error) { if ctx == nil { ctx = context.Background() } f, err := transformBsoncoreDocument(coll.registry, filter, true, "filter") if err != nil { return nil, err } return coll.updateOrReplace(ctx, f, update, true, rrMany, true, opts...) } // ReplaceOne executes an update command to replace at most one document in the collection. // // The filter parameter must be a document containing query operators and can be used to select the document to be // replaced. It cannot be nil. If the filter does not match any documents, the operation will succeed and an // UpdateResult with a MatchedCount of 0 will be returned. 
// If the filter matches multiple documents, one will be
// selected from the matched set and MatchedCount will equal 1.
//
// The replacement parameter must be a document that will be used to replace the selected document. It cannot be nil
// and cannot contain any update operators (https://docs.mongodb.com/manual/reference/operator/update/).
//
// The opts parameter can be used to specify options for the operation (see the options.ReplaceOptions documentation).
//
// For more information about the command, see https://docs.mongodb.com/manual/reference/command/update/.
func (coll *Collection) ReplaceOne(ctx context.Context, filter interface{},
	replacement interface{}, opts ...*options.ReplaceOptions) (*UpdateResult, error) {

	if ctx == nil {
		ctx = context.Background()
	}

	f, err := transformBsoncoreDocument(coll.registry, filter, true, "filter")
	if err != nil {
		return nil, err
	}

	r, err := transformBsoncoreDocument(coll.registry, replacement, true, "replacement")
	if err != nil {
		return nil, err
	}
	if err := ensureNoDollarKey(r); err != nil {
		return nil, err
	}

	updateOptions := make([]*options.UpdateOptions, 0, len(opts))
	for _, opt := range opts {
		uOpts := options.Update()
		uOpts.BypassDocumentValidation = opt.BypassDocumentValidation
		uOpts.Collation = opt.Collation
		uOpts.Upsert = opt.Upsert
		uOpts.Hint = opt.Hint
		uOpts.Let = opt.Let
		updateOptions = append(updateOptions, uOpts)
	}

	return coll.updateOrReplace(ctx, f, r, false, rrOne, false, updateOptions...)
}

// Aggregate executes an aggregate command against the collection and returns a cursor over the resulting documents.
//
// The pipeline parameter must be an array of documents, each representing an aggregation stage. The pipeline cannot
// be nil but can be empty. The stage documents must all be non-nil. For a pipeline of bson.D documents, the
// mongo.Pipeline type can be used. See
// https://docs.mongodb.com/manual/reference/operator/aggregation-pipeline/#db-collection-aggregate-stages for a list of
// valid stages in aggregations.
//
// The opts parameter can be used to specify options for the operation (see the options.AggregateOptions documentation.)
//
// For more information about the command, see https://docs.mongodb.com/manual/reference/command/aggregate/.
func (coll *Collection) Aggregate(ctx context.Context, pipeline interface{},
	opts ...*options.AggregateOptions) (*Cursor, error) {
	a := aggregateParams{
		ctx:            ctx,
		pipeline:       pipeline,
		client:         coll.client,
		registry:       coll.registry,
		readConcern:    coll.readConcern,
		writeConcern:   coll.writeConcern,
		retryRead:      coll.client.retryReads,
		db:             coll.db.name,
		col:            coll.name,
		readSelector:   coll.readSelector,
		writeSelector:  coll.writeSelector,
		readPreference: coll.readPreference,
		opts:           opts,
	}
	return aggregate(a)
}

// aggregate is the helper method for Aggregate
func aggregate(a aggregateParams) (cur *Cursor, err error) {
	if a.ctx == nil {
		a.ctx = context.Background()
	}

	pipelineArr, hasOutputStage, err := transformAggregatePipeline(a.registry, a.pipeline)
	if err != nil {
		return nil, err
	}

	sess := sessionFromContext(a.ctx)
	// Always close any created implicit sessions if aggregate returns an error.
defer func() { if err != nil && sess != nil { closeImplicitSession(sess) } }() if sess == nil && a.client.sessionPool != nil { sess, err = session.NewClientSession(a.client.sessionPool, a.client.id, session.Implicit) if err != nil { return nil, err } } if err = a.client.validSession(sess); err != nil { return nil, err } var wc *writeconcern.WriteConcern if hasOutputStage { wc = a.writeConcern } rc := a.readConcern if sess.TransactionRunning() { wc = nil rc = nil } if !writeconcern.AckWrite(wc) { closeImplicitSession(sess) sess = nil } selector := makeReadPrefSelector(sess, a.readSelector, a.client.localThreshold) if hasOutputStage { selector = makeOutputAggregateSelector(sess, a.readPreference, a.client.localThreshold) } ao := options.MergeAggregateOptions(a.opts...) cursorOpts := a.client.createBaseCursorOptions() op := operation.NewAggregate(pipelineArr). Session(sess). WriteConcern(wc). ReadConcern(rc). ReadPreference(a.readPreference). CommandMonitor(a.client.monitor). ServerSelector(selector). ClusterClock(a.client.clock). Database(a.db). Collection(a.col). Deployment(a.client.deployment). Crypt(a.client.cryptFLE). ServerAPI(a.client.serverAPI). HasOutputStage(hasOutputStage) if ao.AllowDiskUse != nil { op.AllowDiskUse(*ao.AllowDiskUse) } // ignore batchSize of 0 with $out if ao.BatchSize != nil && !(*ao.BatchSize == 0 && hasOutputStage) { op.BatchSize(*ao.BatchSize) cursorOpts.BatchSize = *ao.BatchSize } if ao.BypassDocumentValidation != nil && *ao.BypassDocumentValidation { op.BypassDocumentValidation(*ao.BypassDocumentValidation) } if ao.Collation != nil { op.Collation(bsoncore.Document(ao.Collation.ToDocument())) } if ao.MaxTime != nil { op.MaxTimeMS(int64(*ao.MaxTime / time.Millisecond)) } if ao.MaxAwaitTime != nil { cursorOpts.MaxTimeMS = int64(*ao.MaxAwaitTime / time.Millisecond) } if ao.Comment != nil { op.Comment(*ao.Comment) } if ao.Hint != nil { hintVal, err := transformValue(a.registry, ao.Hint, false, "hint") if err != nil { return nil, err } op.Hint(hintVal) } if ao.Let != nil { let, err := transformBsoncoreDocument(a.registry, ao.Let, true, "let") if err != nil { return nil, err } op.Let(let) } retry := driver.RetryNone if a.retryRead && !hasOutputStage { retry = driver.RetryOncePerCommand } op = op.Retry(retry) err = op.Execute(a.ctx) if err != nil { if wce, ok := err.(driver.WriteCommandError); ok && wce.WriteConcernError != nil { return nil, *convertDriverWriteConcernError(wce.WriteConcernError) } return nil, replaceErrors(err) } bc, err := op.Result(cursorOpts) if err != nil { return nil, replaceErrors(err) } cursor, err := newCursorWithSession(bc, a.registry, sess) return cursor, replaceErrors(err) } // CountDocuments returns the number of documents in the collection. For a fast count of the documents in the // collection, see the EstimatedDocumentCount method. // // The filter parameter must be a document and can be used to select which documents contribute to the count. It // cannot be nil. An empty document (e.g. bson.D{}) should be used to count all documents in the collection. This will // result in a full collection scan. // // The opts parameter can be used to specify options for the operation (see the options.CountOptions documentation). func (coll *Collection) CountDocuments(ctx context.Context, filter interface{}, opts ...*options.CountOptions) (int64, error) { if ctx == nil { ctx = context.Background() } countOpts := options.MergeCountOptions(opts...) 
pipelineArr, err := countDocumentsAggregatePipeline(coll.registry, filter, countOpts) if err != nil { return 0, err } sess := sessionFromContext(ctx) if sess == nil && coll.client.sessionPool != nil { sess, err = session.NewClientSession(coll.client.sessionPool, coll.client.id, session.Implicit) if err != nil { return 0, err } defer sess.EndSession() } if err = coll.client.validSession(sess); err != nil { return 0, err } rc := coll.readConcern if sess.TransactionRunning() { rc = nil } selector := makeReadPrefSelector(sess, coll.readSelector, coll.client.localThreshold) op := operation.NewAggregate(pipelineArr).Session(sess).ReadConcern(rc).ReadPreference(coll.readPreference). CommandMonitor(coll.client.monitor).ServerSelector(selector).ClusterClock(coll.client.clock).Database(coll.db.name). Collection(coll.name).Deployment(coll.client.deployment).Crypt(coll.client.cryptFLE).ServerAPI(coll.client.serverAPI) if countOpts.Collation != nil { op.Collation(bsoncore.Document(countOpts.Collation.ToDocument())) } if countOpts.MaxTime != nil { op.MaxTimeMS(int64(*countOpts.MaxTime / time.Millisecond)) } if countOpts.Hint != nil { hintVal, err := transformValue(coll.registry, countOpts.Hint, false, "hint") if err != nil { return 0, err } op.Hint(hintVal) } retry := driver.RetryNone if coll.client.retryReads { retry = driver.RetryOncePerCommand } op = op.Retry(retry) err = op.Execute(ctx) if err != nil { return 0, replaceErrors(err) } batch := op.ResultCursorResponse().FirstBatch if batch == nil { return 0, errors.New("invalid response from server, no 'firstBatch' field") } docs, err := batch.Documents() if err != nil || len(docs) == 0 { return 0, nil } val, ok := docs[0].Lookup("n").AsInt64OK() if !ok { return 0, errors.New("invalid response from server, no 'n' field") } return val, nil } // EstimatedDocumentCount executes a count command and returns an estimate of the number of documents in the collection // using collection metadata. // // The opts parameter can be used to specify options for the operation (see the options.EstimatedDocumentCountOptions // documentation). // // For more information about the command, see https://docs.mongodb.com/manual/reference/command/count/. func (coll *Collection) EstimatedDocumentCount(ctx context.Context, opts ...*options.EstimatedDocumentCountOptions) (int64, error) { if ctx == nil { ctx = context.Background() } sess := sessionFromContext(ctx) var err error if sess == nil && coll.client.sessionPool != nil { sess, err = session.NewClientSession(coll.client.sessionPool, coll.client.id, session.Implicit) if err != nil { return 0, err } defer sess.EndSession() } err = coll.client.validSession(sess) if err != nil { return 0, err } rc := coll.readConcern if sess.TransactionRunning() { rc = nil } selector := makeReadPrefSelector(sess, coll.readSelector, coll.client.localThreshold) op := operation.NewCount().Session(sess).ClusterClock(coll.client.clock). Database(coll.db.name).Collection(coll.name).CommandMonitor(coll.client.monitor). Deployment(coll.client.deployment).ReadConcern(rc).ReadPreference(coll.readPreference). ServerSelector(selector).Crypt(coll.client.cryptFLE).ServerAPI(coll.client.serverAPI) co := options.MergeEstimatedDocumentCountOptions(opts...) 
if co.MaxTime != nil { op = op.MaxTimeMS(int64(*co.MaxTime / time.Millisecond)) } retry := driver.RetryNone if coll.client.retryReads { retry = driver.RetryOncePerCommand } op.Retry(retry) err = op.Execute(ctx) return op.Result().N, replaceErrors(err) } // Distinct executes a distinct command to find the unique values for a specified field in the collection. // // The fieldName parameter specifies the field name for which distinct values should be returned. // // The filter parameter must be a document containing query operators and can be used to select which documents are // considered. It cannot be nil. An empty document (e.g. bson.D{}) should be used to select all documents. // // The opts parameter can be used to specify options for the operation (see the options.DistinctOptions documentation). // // For more information about the command, see https://docs.mongodb.com/manual/reference/command/distinct/. func (coll *Collection) Distinct(ctx context.Context, fieldName string, filter interface{}, opts ...*options.DistinctOptions) ([]interface{}, error) { if ctx == nil { ctx = context.Background() } f, err := transformBsoncoreDocument(coll.registry, filter, true, "filter") if err != nil { return nil, err } sess := sessionFromContext(ctx) if sess == nil && coll.client.sessionPool != nil { sess, err = session.NewClientSession(coll.client.sessionPool, coll.client.id, session.Implicit) if err != nil { return nil, err } defer sess.EndSession() } err = coll.client.validSession(sess) if err != nil { return nil, err } rc := coll.readConcern if sess.TransactionRunning() { rc = nil } selector := makeReadPrefSelector(sess, coll.readSelector, coll.client.localThreshold) option := options.MergeDistinctOptions(opts...) op := operation.NewDistinct(fieldName, f). Session(sess).ClusterClock(coll.client.clock). Database(coll.db.name).Collection(coll.name).CommandMonitor(coll.client.monitor). Deployment(coll.client.deployment).ReadConcern(rc).ReadPreference(coll.readPreference). ServerSelector(selector).Crypt(coll.client.cryptFLE).ServerAPI(coll.client.serverAPI) if option.Collation != nil { op.Collation(bsoncore.Document(option.Collation.ToDocument())) } if option.MaxTime != nil { op.MaxTimeMS(int64(*option.MaxTime / time.Millisecond)) } retry := driver.RetryNone if coll.client.retryReads { retry = driver.RetryOncePerCommand } op = op.Retry(retry) err = op.Execute(ctx) if err != nil { return nil, replaceErrors(err) } arr, ok := op.Result().Values.ArrayOK() if !ok { return nil, fmt.Errorf("response field 'values' is type array, but received BSON type %s", op.Result().Values.Type) } values, err := arr.Values() if err != nil { return nil, err } retArray := make([]interface{}, len(values)) for i, val := range values { raw := bson.RawValue{Type: val.Type, Value: val.Data} err = raw.Unmarshal(&retArray[i]) if err != nil { return nil, err } } return retArray, replaceErrors(err) } // Find executes a find command and returns a Cursor over the matching documents in the collection. // // The filter parameter must be a document containing query operators and can be used to select which documents are // included in the result. It cannot be nil. An empty document (e.g. bson.D{}) should be used to include all documents. // // The opts parameter can be used to specify options for the operation (see the options.FindOptions documentation). // // For more information about the command, see https://docs.mongodb.com/manual/reference/command/find/. 
func (coll *Collection) Find(ctx context.Context, filter interface{}, opts ...*options.FindOptions) (cur *Cursor, err error) { if ctx == nil { ctx = context.Background() } f, err := transformBsoncoreDocument(coll.registry, filter, true, "filter") if err != nil { return nil, err } sess := sessionFromContext(ctx) // Always close any created implicit sessions if Find returns an error. defer func() { if err != nil && sess != nil { closeImplicitSession(sess) } }() if sess == nil && coll.client.sessionPool != nil { var err error sess, err = session.NewClientSession(coll.client.sessionPool, coll.client.id, session.Implicit) if err != nil { return nil, err } } err = coll.client.validSession(sess) if err != nil { return nil, err } rc := coll.readConcern if sess.TransactionRunning() { rc = nil } selector := makeReadPrefSelector(sess, coll.readSelector, coll.client.localThreshold) op := operation.NewFind(f). Session(sess).ReadConcern(rc).ReadPreference(coll.readPreference). CommandMonitor(coll.client.monitor).ServerSelector(selector). ClusterClock(coll.client.clock).Database(coll.db.name).Collection(coll.name). Deployment(coll.client.deployment).Crypt(coll.client.cryptFLE).ServerAPI(coll.client.serverAPI) fo := options.MergeFindOptions(opts...) cursorOpts := coll.client.createBaseCursorOptions() if fo.AllowDiskUse != nil { op.AllowDiskUse(*fo.AllowDiskUse) } if fo.AllowPartialResults != nil { op.AllowPartialResults(*fo.AllowPartialResults) } if fo.BatchSize != nil { cursorOpts.BatchSize = *fo.BatchSize op.BatchSize(*fo.BatchSize) } if fo.Collation != nil { op.Collation(bsoncore.Document(fo.Collation.ToDocument())) } if fo.Comment != nil { op.Comment(*fo.Comment) } if fo.CursorType != nil { switch *fo.CursorType { case options.Tailable: op.Tailable(true) case options.TailableAwait: op.Tailable(true) op.AwaitData(true) } } if fo.Hint != nil { hint, err := transformValue(coll.registry, fo.Hint, false, "hint") if err != nil { return nil, err } op.Hint(hint) } if fo.Let != nil { let, err := transformBsoncoreDocument(coll.registry, fo.Let, true, "let") if err != nil { return nil, err } op.Let(let) } if fo.Limit != nil { limit := *fo.Limit if limit < 0 { limit = -1 * limit op.SingleBatch(true) } cursorOpts.Limit = int32(limit) op.Limit(limit) } if fo.Max != nil { max, err := transformBsoncoreDocument(coll.registry, fo.Max, true, "max") if err != nil { return nil, err } op.Max(max) } if fo.MaxAwaitTime != nil { cursorOpts.MaxTimeMS = int64(*fo.MaxAwaitTime / time.Millisecond) } if fo.MaxTime != nil { op.MaxTimeMS(int64(*fo.MaxTime / time.Millisecond)) } if fo.Min != nil { min, err := transformBsoncoreDocument(coll.registry, fo.Min, true, "min") if err != nil { return nil, err } op.Min(min) } if fo.NoCursorTimeout != nil { op.NoCursorTimeout(*fo.NoCursorTimeout) } if fo.OplogReplay != nil { op.OplogReplay(*fo.OplogReplay) } if fo.Projection != nil { proj, err := transformBsoncoreDocument(coll.registry, fo.Projection, true, "projection") if err != nil { return nil, err } op.Projection(proj) } if fo.ReturnKey != nil { op.ReturnKey(*fo.ReturnKey) } if fo.ShowRecordID != nil { op.ShowRecordID(*fo.ShowRecordID) } if fo.Skip != nil { op.Skip(*fo.Skip) } if fo.Snapshot != nil { op.Snapshot(*fo.Snapshot) } if fo.Sort != nil { sort, err := transformBsoncoreDocument(coll.registry, fo.Sort, false, "sort") if err != nil { return nil, err } op.Sort(sort) } retry := driver.RetryNone if coll.client.retryReads { retry = driver.RetryOncePerCommand } op = op.Retry(retry) if err = op.Execute(ctx); err != nil { return nil, 
			replaceErrors(err)
	}

	bc, err := op.Result(cursorOpts)
	if err != nil {
		return nil, replaceErrors(err)
	}

	return newCursorWithSession(bc, coll.registry, sess)
}

// FindOne executes a find command and returns a SingleResult for one document in the collection.
//
// The filter parameter must be a document containing query operators and can be used to select the document to be
// returned. It cannot be nil. If the filter does not match any documents, a SingleResult with an error set to
// ErrNoDocuments will be returned. If the filter matches multiple documents, one will be selected from the matched set.
//
// The opts parameter can be used to specify options for this operation (see the options.FindOneOptions documentation).
//
// For more information about the command, see https://docs.mongodb.com/manual/reference/command/find/.
func (coll *Collection) FindOne(ctx context.Context, filter interface{},
	opts ...*options.FindOneOptions) *SingleResult {

	if ctx == nil {
		ctx = context.Background()
	}

	findOpts := make([]*options.FindOptions, 0, len(opts))
	for _, opt := range opts {
		findOpts = append(findOpts, &options.FindOptions{
			AllowPartialResults: opt.AllowPartialResults,
			BatchSize:           opt.BatchSize,
			Collation:           opt.Collation,
			Comment:             opt.Comment,
			CursorType:          opt.CursorType,
			Hint:                opt.Hint,
			Max:                 opt.Max,
			MaxAwaitTime:        opt.MaxAwaitTime,
			MaxTime:             opt.MaxTime,
			Min:                 opt.Min,
			NoCursorTimeout:     opt.NoCursorTimeout,
			OplogReplay:         opt.OplogReplay,
			Projection:          opt.Projection,
			ReturnKey:           opt.ReturnKey,
			ShowRecordID:        opt.ShowRecordID,
			Skip:                opt.Skip,
			Snapshot:            opt.Snapshot,
			Sort:                opt.Sort,
		})
	}
	// Unconditionally send a limit to make sure only one document is returned and the cursor is not kept open
	// by the server.
	findOpts = append(findOpts, options.Find().SetLimit(-1))

	cursor, err := coll.Find(ctx, filter, findOpts...)
	return &SingleResult{cur: cursor, reg: coll.registry, err: replaceErrors(err)}
}

func (coll *Collection) findAndModify(ctx context.Context, op *operation.FindAndModify) *SingleResult {
	if ctx == nil {
		ctx = context.Background()
	}

	sess := sessionFromContext(ctx)
	var err error
	if sess == nil && coll.client.sessionPool != nil {
		sess, err = session.NewClientSession(coll.client.sessionPool, coll.client.id, session.Implicit)
		if err != nil {
			return &SingleResult{err: err}
		}
		defer sess.EndSession()
	}

	err = coll.client.validSession(sess)
	if err != nil {
		return &SingleResult{err: err}
	}

	wc := coll.writeConcern
	if sess.TransactionRunning() {
		wc = nil
	}
	if !writeconcern.AckWrite(wc) {
		sess = nil
	}

	selector := makePinnedSelector(sess, coll.writeSelector)

	retry := driver.RetryNone
	if coll.client.retryWrites {
		retry = driver.RetryOnce
	}

	op = op.Session(sess).
		WriteConcern(wc).
		CommandMonitor(coll.client.monitor).
		ServerSelector(selector).
		ClusterClock(coll.client.clock).
		Database(coll.db.name).
		Collection(coll.name).
		Deployment(coll.client.deployment).
		Retry(retry).
		Crypt(coll.client.cryptFLE)

	_, err = processWriteError(op.Execute(ctx))
	if err != nil {
		return &SingleResult{err: err}
	}

	return &SingleResult{rdr: bson.Raw(op.Result().Value), reg: coll.registry}
}

// FindOneAndDelete executes a findAndModify command to delete at most one document in the collection and returns the
// document as it appeared before deletion.
//
// The filter parameter must be a document containing query operators and can be used to select the document to be
// deleted. It cannot be nil. If the filter does not match any documents, a SingleResult with an error set to
// ErrNoDocuments will be returned. If the filter matches multiple documents, one will be selected from the matched set.
//
// The opts parameter can be used to specify options for the operation (see the options.FindOneAndDeleteOptions
// documentation).
//
// For more information about the command, see https://docs.mongodb.com/manual/reference/command/findAndModify/.
func (coll *Collection) FindOneAndDelete(ctx context.Context, filter interface{},
	opts ...*options.FindOneAndDeleteOptions) *SingleResult {

	f, err := transformBsoncoreDocument(coll.registry, filter, true, "filter")
	if err != nil {
		return &SingleResult{err: err}
	}
	fod := options.MergeFindOneAndDeleteOptions(opts...)
	op := operation.NewFindAndModify(f).Remove(true).ServerAPI(coll.client.serverAPI)
	if fod.Collation != nil {
		op = op.Collation(bsoncore.Document(fod.Collation.ToDocument()))
	}
	if fod.MaxTime != nil {
		op = op.MaxTimeMS(int64(*fod.MaxTime / time.Millisecond))
	}
	if fod.Projection != nil {
		proj, err := transformBsoncoreDocument(coll.registry, fod.Projection, true, "projection")
		if err != nil {
			return &SingleResult{err: err}
		}
		op = op.Fields(proj)
	}
	if fod.Sort != nil {
		sort, err := transformBsoncoreDocument(coll.registry, fod.Sort, false, "sort")
		if err != nil {
			return &SingleResult{err: err}
		}
		op = op.Sort(sort)
	}
	if fod.Hint != nil {
		hint, err := transformValue(coll.registry, fod.Hint, false, "hint")
		if err != nil {
			return &SingleResult{err: err}
		}
		op = op.Hint(hint)
	}
	if fod.Let != nil {
		let, err := transformBsoncoreDocument(coll.registry, fod.Let, true, "let")
		if err != nil {
			return &SingleResult{err: err}
		}
		op = op.Let(let)
	}

	return coll.findAndModify(ctx, op)
}

// FindOneAndReplace executes a findAndModify command to replace at most one document in the collection
// and returns the document as it appeared before replacement.
//
// The filter parameter must be a document containing query operators and can be used to select the document to be
// replaced. It cannot be nil. If the filter does not match any documents, a SingleResult with an error set to
// ErrNoDocuments will be returned. If the filter matches multiple documents, one will be selected from the matched set.
//
// The replacement parameter must be a document that will be used to replace the selected document. It cannot be nil
// and cannot contain any update operators (https://docs.mongodb.com/manual/reference/operator/update/).
//
// The opts parameter can be used to specify options for the operation (see the options.FindOneAndReplaceOptions
// documentation).
//
// For more information about the command, see https://docs.mongodb.com/manual/reference/command/findAndModify/.
func (coll *Collection) FindOneAndReplace(ctx context.Context, filter interface{},
	replacement interface{}, opts ...*options.FindOneAndReplaceOptions) *SingleResult {

	f, err := transformBsoncoreDocument(coll.registry, filter, true, "filter")
	if err != nil {
		return &SingleResult{err: err}
	}
	r, err := transformBsoncoreDocument(coll.registry, replacement, true, "replacement")
	if err != nil {
		return &SingleResult{err: err}
	}
	if firstElem, err := r.IndexErr(0); err == nil && strings.HasPrefix(firstElem.Key(), "$") {
		return &SingleResult{err: errors.New("replacement document cannot contain keys beginning with '$'")}
	}

	fo := options.MergeFindOneAndReplaceOptions(opts...)
	op := operation.NewFindAndModify(f).Update(bsoncore.Value{Type: bsontype.EmbeddedDocument, Data: r}).
		ServerAPI(coll.client.serverAPI)
	if fo.BypassDocumentValidation != nil && *fo.BypassDocumentValidation {
		op = op.BypassDocumentValidation(*fo.BypassDocumentValidation)
	}
	if fo.Collation != nil {
		op = op.Collation(bsoncore.Document(fo.Collation.ToDocument()))
	}
	if fo.MaxTime != nil {
		op = op.MaxTimeMS(int64(*fo.MaxTime / time.Millisecond))
	}
	if fo.Projection != nil {
		proj, err := transformBsoncoreDocument(coll.registry, fo.Projection, true, "projection")
		if err != nil {
			return &SingleResult{err: err}
		}
		op = op.Fields(proj)
	}
	if fo.ReturnDocument != nil {
		op = op.NewDocument(*fo.ReturnDocument == options.After)
	}
	if fo.Sort != nil {
		sort, err := transformBsoncoreDocument(coll.registry, fo.Sort, false, "sort")
		if err != nil {
			return &SingleResult{err: err}
		}
		op = op.Sort(sort)
	}
	if fo.Upsert != nil {
		op = op.Upsert(*fo.Upsert)
	}
	if fo.Hint != nil {
		hint, err := transformValue(coll.registry, fo.Hint, false, "hint")
		if err != nil {
			return &SingleResult{err: err}
		}
		op = op.Hint(hint)
	}
	if fo.Let != nil {
		let, err := transformBsoncoreDocument(coll.registry, fo.Let, true, "let")
		if err != nil {
			return &SingleResult{err: err}
		}
		op = op.Let(let)
	}

	return coll.findAndModify(ctx, op)
}

// FindOneAndUpdate executes a findAndModify command to update at most one document in the collection and returns the
// document as it appeared before updating.
//
// The filter parameter must be a document containing query operators and can be used to select the document to be
// updated. It cannot be nil. If the filter does not match any documents, a SingleResult with an error set to
// ErrNoDocuments will be returned. If the filter matches multiple documents, one will be selected from the matched set.
//
// The update parameter must be a document containing update operators
// (https://docs.mongodb.com/manual/reference/operator/update/) and can be used to specify the modifications to be made
// to the selected document. It cannot be nil or empty.
//
// The opts parameter can be used to specify options for the operation (see the options.FindOneAndUpdateOptions
// documentation).
//
// For more information about the command, see https://docs.mongodb.com/manual/reference/command/findAndModify/.
func (coll *Collection) FindOneAndUpdate(ctx context.Context, filter interface{},
	update interface{}, opts ...*options.FindOneAndUpdateOptions) *SingleResult {

	if ctx == nil {
		ctx = context.Background()
	}

	f, err := transformBsoncoreDocument(coll.registry, filter, true, "filter")
	if err != nil {
		return &SingleResult{err: err}
	}

	fo := options.MergeFindOneAndUpdateOptions(opts...)
op := operation.NewFindAndModify(f).ServerAPI(coll.client.serverAPI) u, err := transformUpdateValue(coll.registry, update, true) if err != nil { return &SingleResult{err: err} } op = op.Update(u) if fo.ArrayFilters != nil { filtersDoc, err := fo.ArrayFilters.ToArrayDocument() if err != nil { return &SingleResult{err: err} } op = op.ArrayFilters(bsoncore.Document(filtersDoc)) } if fo.BypassDocumentValidation != nil && *fo.BypassDocumentValidation { op = op.BypassDocumentValidation(*fo.BypassDocumentValidation) } if fo.Collation != nil { op = op.Collation(bsoncore.Document(fo.Collation.ToDocument())) } if fo.MaxTime != nil { op = op.MaxTimeMS(int64(*fo.MaxTime / time.Millisecond)) } if fo.Projection != nil { proj, err := transformBsoncoreDocument(coll.registry, fo.Projection, true, "projection") if err != nil { return &SingleResult{err: err} } op = op.Fields(proj) } if fo.ReturnDocument != nil { op = op.NewDocument(*fo.ReturnDocument == options.After) } if fo.Sort != nil { sort, err := transformBsoncoreDocument(coll.registry, fo.Sort, false, "sort") if err != nil { return &SingleResult{err: err} } op = op.Sort(sort) } if fo.Upsert != nil { op = op.Upsert(*fo.Upsert) } if fo.Hint != nil { hint, err := transformValue(coll.registry, fo.Hint, false, "hint") if err != nil { return &SingleResult{err: err} } op = op.Hint(hint) } if fo.Let != nil { let, err := transformBsoncoreDocument(coll.registry, fo.Let, true, "let") if err != nil { return &SingleResult{err: err} } op = op.Let(let) } return coll.findAndModify(ctx, op) } // Watch returns a change stream for all changes on the corresponding collection. See // https://docs.mongodb.com/manual/changeStreams/ for more information about change streams. // // The Collection must be configured with read concern majority or no read concern for a change stream to be created // successfully. // // The pipeline parameter must be an array of documents, each representing a pipeline stage. The pipeline cannot be // nil but can be empty. The stage documents must all be non-nil. See https://docs.mongodb.com/manual/changeStreams/ for // a list of pipeline stages that can be used with change streams. For a pipeline of bson.D documents, the // mongo.Pipeline{} type can be used. // // The opts parameter can be used to specify options for change stream creation (see the options.ChangeStreamOptions // documentation). func (coll *Collection) Watch(ctx context.Context, pipeline interface{}, opts ...*options.ChangeStreamOptions) (*ChangeStream, error) { csConfig := changeStreamConfig{ readConcern: coll.readConcern, readPreference: coll.readPreference, client: coll.client, registry: coll.registry, streamType: CollectionStream, collectionName: coll.Name(), databaseName: coll.db.Name(), crypt: coll.client.cryptFLE, } return newChangeStream(ctx, csConfig, pipeline, opts...) } // Indexes returns an IndexView instance that can be used to perform operations on the indexes for the collection. func (coll *Collection) Indexes() IndexView { return IndexView{coll: coll} } // Drop drops the collection on the server. This method ignores "namespace not found" errors so it is safe to drop // a collection that does not exist on the server. 
func (coll *Collection) Drop(ctx context.Context) error {
	if ctx == nil {
		ctx = context.Background()
	}

	sess := sessionFromContext(ctx)
	if sess == nil && coll.client.sessionPool != nil {
		var err error
		sess, err = session.NewClientSession(coll.client.sessionPool, coll.client.id, session.Implicit)
		if err != nil {
			return err
		}
		defer sess.EndSession()
	}

	err := coll.client.validSession(sess)
	if err != nil {
		return err
	}

	wc := coll.writeConcern
	if sess.TransactionRunning() {
		wc = nil
	}
	if !writeconcern.AckWrite(wc) {
		sess = nil
	}

	selector := makePinnedSelector(sess, coll.writeSelector)

	op := operation.NewDropCollection().
		Session(sess).WriteConcern(wc).CommandMonitor(coll.client.monitor).
		ServerSelector(selector).ClusterClock(coll.client.clock).
		Database(coll.db.name).Collection(coll.name).
		Deployment(coll.client.deployment).Crypt(coll.client.cryptFLE).
		ServerAPI(coll.client.serverAPI)
	err = op.Execute(ctx)

	// ignore namespace not found errors
	driverErr, ok := err.(driver.Error)
	if !ok || (ok && !driverErr.NamespaceNotFound()) {
		return replaceErrors(err)
	}
	return nil
}

// makePinnedSelector makes a selector for a pinned session with a pinned server. Will attempt to do server selection on
// the pinned server but if that fails it will go through a list of default selectors
func makePinnedSelector(sess *session.Client, defaultSelector description.ServerSelector) description.ServerSelectorFunc {
	return func(t description.Topology, svrs []description.Server) ([]description.Server, error) {
		if sess != nil && sess.PinnedServer != nil {
			// If there is a pinned server, try to find it in the list of candidates.
			for _, candidate := range svrs {
				if candidate.Addr == sess.PinnedServer.Addr {
					return []description.Server{candidate}, nil
				}
			}

			return nil, nil
		}

		return defaultSelector.SelectServer(t, svrs)
	}
}

func makeReadPrefSelector(sess *session.Client, selector description.ServerSelector, localThreshold time.Duration) description.ServerSelectorFunc {
	if sess != nil && sess.TransactionRunning() {
		selector = description.CompositeSelector([]description.ServerSelector{
			description.ReadPrefSelector(sess.CurrentRp),
			description.LatencySelector(localThreshold),
		})
	}

	return makePinnedSelector(sess, selector)
}

func makeOutputAggregateSelector(sess *session.Client, rp *readpref.ReadPref, localThreshold time.Duration) description.ServerSelectorFunc {
	if sess != nil && sess.TransactionRunning() {
		// Use current transaction's read preference if available
		rp = sess.CurrentRp
	}

	selector := description.CompositeSelector([]description.ServerSelector{
		description.OutputAggregateSelector(rp),
		description.LatencySelector(localThreshold),
	})
	return makePinnedSelector(sess, selector)
}
Request: nil, })
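The three findAndModify wrappers above all follow the same shape: transform the filter, merge options onto the operation, then delegate to coll.findAndModify. A minimal usage sketch of this API from the caller's side, assuming a reachable local mongod; the test database, the users collection, and the name/logins fields are hypothetical:

// Usage sketch for the FindOneAndUpdate API documented above (mongo-driver v1.x).
package main

import (
	"context"
	"fmt"
	"log"
	"time"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
	"go.mongodb.org/mongo-driver/mongo/options"
)

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	client, err := mongo.Connect(ctx, options.Client().ApplyURI("mongodb://localhost:27017"))
	if err != nil {
		log.Fatal(err)
	}
	defer client.Disconnect(ctx)

	coll := client.Database("test").Collection("users")

	// Return the post-update document instead of the pre-update default,
	// and insert one if the filter matches nothing.
	opts := options.FindOneAndUpdate().
		SetReturnDocument(options.After).
		SetUpsert(true)

	var updated bson.M
	err = coll.FindOneAndUpdate(ctx,
		bson.D{{Key: "name", Value: "alice"}}, // filter: query operators / equality only
		bson.D{{Key: "$inc", Value: bson.D{{Key: "logins", Value: 1}}}}, // update operators only
		opts,
	).Decode(&updated)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(updated)
}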
nextGreaterElement.ts
const exc = (nums: number[], l: number, r: number) => { [nums[l], nums[r]] = [nums[r], nums[l]]; }; const findLeft = (nums: number[]) => { for (let i = nums.length - 2; i >= 0; i--) { if (nums[i] < nums[i + 1]) return i; } return -1; }; const findRight = (nums: number[], left: number) => { for (let i = nums.length - 1; i > left; i--) { if (nums[i] > nums[left]) return i; } throw new Error("never"); }; const reverse = ( nums: number[], from: number, to: number ): void => { for (let l = from, r = to; l < r; l++, r--) { exc(nums, l, r); } }; const MAX_32INT = 2 ** 31 - 1; function
(n: number): number { const nums = String(n).split("").map(Number); const left = findLeft(nums); if (left === -1) return -1; const right = findRight(nums, left); exc(nums, left, right); reverse(nums, left + 1, nums.length - 1); const ret = Number(nums.join("")); if (ret > MAX_32INT) return -1; return ret; }
nextGreaterElement
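The TypeScript solution above is the classic next-permutation routine: scan from the right for the first digit smaller than its successor (the pivot), swap it with the smallest larger digit in the suffix, then reverse the suffix. An illustrative sketch of the same technique in Go, mirroring the overflow guard in the TypeScript version (not part of the record above):

// Next greater number with the same digits, via the next-permutation technique.
package main

import (
	"fmt"
	"math"
	"strconv"
)

func nextGreaterElement(n int) int {
	digits := []byte(strconv.Itoa(n))

	// Find the pivot: the rightmost i with digits[i] < digits[i+1].
	i := len(digits) - 2
	for i >= 0 && digits[i] >= digits[i+1] {
		i--
	}
	if i < 0 {
		return -1 // digits are non-increasing; no larger permutation exists
	}

	// Swap the pivot with the rightmost digit greater than it.
	j := len(digits) - 1
	for digits[j] <= digits[i] {
		j--
	}
	digits[i], digits[j] = digits[j], digits[i]

	// Reverse the suffix so the tail is the smallest possible.
	for l, r := i+1, len(digits)-1; l < r; l, r = l+1, r-1 {
		digits[l], digits[r] = digits[r], digits[l]
	}

	ret, _ := strconv.Atoi(string(digits))
	if ret > math.MaxInt32 {
		return -1 // same 32-bit guard as MAX_32INT in the TypeScript version
	}
	return ret
}

func main() {
	fmt.Println(nextGreaterElement(12))     // 21
	fmt.Println(nextGreaterElement(21))     // -1
	fmt.Println(nextGreaterElement(230241)) // 230412
}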
db.py
""" # db module - database adapter functions """ from sqlalchemy import DateTime, TypeDecorator # pylint: disable=abstract-method class
(TypeDecorator):
    ''' Results returned as offset-aware datetimes. '''
    impl = DateTime

    # pylint: disable=unused-argument
    def process_result_value(self, value, dialect):
        """ Attach the UTC timezone to values read from the database. """
        return value.replace(tzinfo=pytz.utc)
DateTimeUtc
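The TypeDecorator above exists so that naive datetimes coming back from the database are reinterpreted as UTC. The same normalization idea sketched in Go; ensureUTC is a hypothetical helper, not part of any driver API:

// Reinterpret naive wall-clock timestamps as UTC, analogous to
// value.replace(tzinfo=pytz.utc) in the Python TypeDecorator above.
package main

import (
	"fmt"
	"time"
)

func ensureUTC(t time.Time) time.Time {
	// Keep the wall-clock fields, but stamp them as UTC.
	return time.Date(t.Year(), t.Month(), t.Day(),
		t.Hour(), t.Minute(), t.Second(), t.Nanosecond(), time.UTC)
}

func main() {
	naive := time.Date(2021, 6, 1, 12, 0, 0, 0, time.Local)
	fmt.Println(ensureUTC(naive)) // 2021-06-01 12:00:00 +0000 UTC
}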
index.js
/**
 *
 * @export
 * @param {any} times the time value to convert
 * @param {any} format the target format string
 * @returns the formatted time string
 */
export function dateFormat(times, format) {
  // let time = Date.parse(times);
  let time = new Date(times)
  const between = (Date.now() - Number(time)) / 1000
  if (between < 3600 && ((between / 60) < 1)) {
    return '刚刚'
  } else if (between < 3600) {
    return pluralize(~~(between / 60), '分钟')
  } else if (between < 86400) {
    return pluralize(~~(between / 3600), '小时')
  }
  if (!time) return null;
  var date = new Date(time);
  var o = {
    'M+': date.getMonth() + 1, // month
    'd+': date.getDate(), // day
    'h+': date.getHours(), // hour
    'm+': date.getMinutes(), // minute
    's+': date.getSeconds(), // second
    'q+': Math.floor((date.getMonth() + 3) / 3), // quarter
    S: date.getMilliseconds() // millisecond
  };
  if (/(y+)/.test(format)) {
    format = format.replace(RegExp.$1, (date.getFullYear() + '').substr(4 - RegExp.$1.length));
  }
  for (var k in o) {
    if (new RegExp('(' + k + ')').test(format)) {
      format = format.replace(RegExp.$1, RegExp.$1.length === 1 ? o[k] : ('00' + o[k]).substr(('' + o[k]).length));
    }
  }
  return format;
}

function pluralize(time, label) {
  return time + label + '以前';
}

// truncate text to the given length, appending an ellipsis
export function text(text, length = text.length) {
  const cansub = text && text.length > length;
  return cansub ? text.substr(0, length) + '...' : text;
}

/**
 * Parse the time to string
 * @param {(Object|string|number)} time
 * @param {string} cFormat
 * @returns {string}
 */
export function parseTime(time, cFormat) {
  if (arguments.length === 0) {
    return null
  }
  const format = cFormat || '{y}-{m}-{d} {h}:{i}:{s}'
  let date
  if (!time) return null
  if (typeof time === 'object') {
    date = time
  } else {
    if ((typeof time === 'string') && (/^[0-9]+$/.test(time))) {
      time = parseInt(time)
    }
    if ((typeof time === 'number') && (time.toString().length === 10)) {
      time = time * 1000
    }
    date = new Date(time)
  }
  const formatObj = {
    y: date.getFullYear(),
    m: date.getMonth() + 1,
    d: date.getDate(),
    h: date.getHours(),
    i: date.getMinutes(),
    s: date.getSeconds(),
    a: date.getDay()
  }
  const time_str = format.replace(/{(y|m|d|h|i|s|a)+}/g, (result, key) => {
    let value = formatObj[key]
    // Note: getDay() returns 0 on Sunday
    if (key === 'a') { return ['日', '一', '二', '三', '四', '五', '六'][value] }
    if (result.length > 0 && value < 10) {
      value = '0' + value
    }
    return value || 0
  })
  return time_str
}

/**
 * @param {number} time
 * @param {string} option
 * @returns {string}
 */
export function formatTime(time, option) {
  if (('' + time).length === 10) {
time = +time } const d = new Date(time) const now = Date.now() const diff = (now - d) / 1000 if (diff < 30) { return '刚刚' } else if (diff < 3600) { // less 1 hour return Math.ceil(diff / 60) + '分钟前' } else if (diff < 3600 * 24) { return Math.ceil(diff / 3600) + '小时前' } else if (diff < 3600 * 24 * 2) { return '1天前' } if (option) { return parseTime(time, option) } else { return ( d.getMonth() + 1 + '月' + d.getDate() + '日' + d.getHours() + '时' + d.getMinutes() + '分' ) } } /** * @param {string} url * @returns {Object} */ export function getQueryObject(url) { url = url == null ? window.location.href : url const search = url.substring(url.lastIndexOf('?') + 1) const obj = {} const reg = /([^?&=]+)=([^?&=]*)/g search.replace(reg, (rs, $1, $2) => { const name = decodeURIComponent($1) let val = decodeURIComponent($2) val = String(val) obj[name] = val return rs }) return obj } /** * @param {string} input value * @returns {number} output value */ export function byteLength(str) { // returns the byte length of an utf8 string let s = str.length for (var i = str.length - 1; i >= 0; i--) { const code = str.charCodeAt(i) if (code > 0x7f && code <= 0x7ff) s++ else if (code > 0x7ff && code <= 0xffff) s += 2 if (code >= 0xDC00 && code <= 0xDFFF) i-- } return s } /** * @param {Array} actual * @returns {Array} */ export function cleanArray(actual) { const newArray = [] for (let i = 0; i < actual.length; i++) { if (actual[i]) { newArray.push(actual[i]) } } return newArray } /** * @param {Object} json * @returns {Array} */ export function param(json) { if (!json) return '' return cleanArray( Object.keys(json).map(key => { if (json[key] === undefined) return '' return encodeURIComponent(key) + '=' + encodeURIComponent(json[key]) }) ).join('&') } /** * @param {string} url * @returns {Object} */ export function param2Obj(url) { const search = url.split('?')[1] if (!search) { return {} } return JSON.parse( '{"' + decodeURIComponent(search) .replace(/"/g, '\\"') .replace(/&/g, '","') .replace(/=/g, '":"') .replace(/\+/g, ' ') + '"}' ) } /** * @param {string} val * @returns {string} */ export function html2Text(val) { const div = document.createElement('div') div.innerHTML = val return div.textContent || div.innerText } /** * Merges two objects, giving the last one precedence * @param {Object} target * @param {(Object|Array)} source * @returns {Object} */ export function objectMerge(target, source) { if (typeof target !== 'object') { target = {} } if (Array.isArray(source)) { return source.slice() } Object.keys(source).forEach(property => { const sourceProperty = source[property] if (typeof sourceProperty === 'object') { target[property] = objectMerge(target[property], sourceProperty) } else { target[property] = sourceProperty } }) return target } /** * @param {HTMLElement} element * @param {string} className */ export function toggleClass(element, className) { if (!element || !className) { return } let classString = element.className const nameIndex = classString.indexOf(className) if (nameIndex === -1) { classString += '' + className } else { classString = classString.substr(0, nameIndex) + classString.substr(nameIndex + className.length) } element.className = classString } /** * @param {string} type * @returns {Date} */ export function getTime(type) { if (type === 'start') { return new Date().getTime() - 3600 * 1000 * 24 * 90 } else { return new Date(new Date().toDateString()) } } /** * @param {Function} func * @param {number} wait * @param {boolean} immediate * @return {*} */ export function debounce(func, wait, 
immediate) {
  let timeout, args, context, timestamp, result

  const later = function() {
    // time elapsed since the last trigger
    const last = +new Date() - timestamp

    // if less time than `wait` has passed since the last trigger, reschedule
    if (last < wait && last > 0) {
      timeout = setTimeout(later, wait - last)
    } else {
      timeout = null
      // when immediate === true the function already ran on the leading edge, so skip it here
      if (!immediate) {
        result = func.apply(context, args)
        if (!timeout) context = args = null
      }
    }
  }

  return function(...args) {
    context = this
    timestamp = +new Date()
    const callNow = immediate && !timeout
    // if no timer is pending, start one
    if (!timeout) timeout = setTimeout(later, wait)
    if (callNow) {
      result = func.apply(context, args)
      context = args = null
    }

    return result
  }
}

/**
 * This is just a simple version of deep copy
 * It has bugs in a lot of edge cases
 * If you want a robust deep copy, use lodash's _.cloneDeep
 * @param {Object} source
 * @returns {Object}
 */
export function deepClone(source) {
  if (!source && typeof source !== 'object') {
    throw new Error('error arguments', 'deepClone')
  }
  const targetObj = source.constructor === Array ? [] : {}
  Object.keys(source).forEach(keys => {
    if (source[keys] && typeof source[keys] === 'object') {
      targetObj[keys] = deepClone(source[keys])
    } else {
      targetObj[keys] = source[keys]
    }
  })
  return targetObj
}

/**
 * @param {Array} arr
 * @returns {Array}
 */
export function uniqueArr(arr) {
  return Array.from(new Set(arr))
}

/**
 * @returns {string}
 */
export function createUniqueString() {
  const timestamp = +new Date() + ''
  const randomNum = parseInt((1 + Math.random()) * 65536) + ''
  return (+(randomNum + timestamp)).toString(32)
}

/**
 * Check if an element has a class
 * @param {HTMLElement} elm
 * @param {string} cls
 * @returns {boolean}
 */
export function hasClass(ele, cls) {
  return !!ele.className.match(new RegExp('(\\s|^)' + cls + '(\\s|$)'))
}

/**
 * Add class to element
 * @param {HTMLElement} elm
 * @param {string} cls
 */
export function addClass(ele, cls) {
  if (!hasClass(ele, cls)) ele.className += ' ' + cls
}

/**
 * Remove class from element
 * @param {HTMLElement} elm
 * @param {string} cls
 */
export function removeClass(ele, cls) {
  if (hasClass(ele, cls)) {
    const reg = new RegExp('(\\s|^)' + cls + '(\\s|$)')
    ele.className = ele.className.replace(reg, ' ')
  }
}
time = parseInt(time) * 1000 } else {
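The debounce above postpones func until `wait` milliseconds have passed without a new call, optionally firing once on the leading edge when `immediate` is set. A trailing-edge-only sketch of the same technique in Go using time.AfterFunc; names are illustrative, not part of the file above:

// Trailing-edge debounce: fn runs only after calls stop arriving for `wait`.
package main

import (
	"fmt"
	"sync"
	"time"
)

func debounce(fn func(), wait time.Duration) func() {
	var mu sync.Mutex
	var timer *time.Timer
	return func() {
		mu.Lock()
		defer mu.Unlock()
		// Each call resets the timer, so fn fires once the burst ends.
		if timer != nil {
			timer.Stop()
		}
		timer = time.AfterFunc(wait, fn)
	}
}

func main() {
	save := debounce(func() { fmt.Println("saved") }, 200*time.Millisecond)
	for i := 0; i < 5; i++ {
		save() // only the last call in the burst triggers "saved"
		time.Sleep(50 * time.Millisecond)
	}
	time.Sleep(300 * time.Millisecond)
}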
test_function_input_output.py
""" Copyright 2020 The OneFlow Authors. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import unittest import numpy as np import oneflow as flow import oneflow.typing as oft import oneflow._oneflow_internal from typing import Tuple @flow.unittest.skip_unless_1n4d() class TestFunctionInputOutput(flow.unittest.TestCase): def test_FixedTensorDef(test_case): @flow.global_function() def Foo(x: oft.Numpy.Placeholder((2, 5))): return x data = np.ones((2, 5), dtype=np.float32) of_ret = Foo(data).get() test_case.assertEqual(of_ret.numpy().max(), 1) test_case.assertEqual(of_ret.numpy().min(), 1) test_case.assertTrue(np.allclose(of_ret.numpy(), data)) def test_FixedTensorDef_2_device(test_case): flow.config.gpu_device_num(2) @flow.global_function() def Foo(x: oft.Numpy.Placeholder((2, 5))): return x data = np.ones((2, 5), dtype=np.float32) of_ret = Foo(data).get() test_case.assertEqual(of_ret.numpy().max(), 1) test_case.assertEqual(of_ret.numpy().min(), 1) test_case.assertTrue(np.allclose(of_ret.numpy(), data)) def test_MirroredTensorDef(test_case): func_config = flow.FunctionConfig() func_config.default_logical_view(flow.scope.mirrored_view()) @flow.global_function(function_config=func_config) def Foo(x: oft.ListNumpy.Placeholder((2, 5))): return x data = np.ones((1, 5), dtype=np.float32) ndarray_list = Foo([data]).get().numpy_list() test_case.assertEqual(len(ndarray_list), 1) test_case.assertTrue(np.allclose(ndarray_list[0], data)) if __name__ == "__main__": unittest.main()
Unless required by applicable law or agreed to in writing, software
with_env.rs
use crate::prelude::*;
use nu_engine::run_block;
use nu_engine::WholeStreamCommand;
use nu_errors::ShellError;
use nu_protocol::{
    hir::CapturedBlock, Signature, SpannedTypeName, SyntaxShape, UntaggedValue, Value,
};

pub struct WithEnv;

#[derive(Deserialize, Debug)]
struct WithEnvArgs {
    variable: Value,
    block: CapturedBlock,
}

impl WholeStreamCommand for WithEnv {
    fn name(&self) -> &str {
        "with-env"
    }

    fn signature(&self) -> Signature {
        Signature::build("with-env")
            .required(
                "variable",
                SyntaxShape::Any,
                "the environment variable to temporarily set",
            )
            .required(
                "block",
                SyntaxShape::Block,
                "the block to run once the variable is set",
            )
    }

    fn usage(&self) -> &str {
        "Runs a block with an environment variable set."
    }

    fn run_with_actions(&self, args: CommandArgs) -> Result<ActionStream, ShellError> {
        with_env(args)
    }

    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                description: "Set the MYENV environment variable",
                example: r#"with-env [MYENV "my env value"] { echo $nu.env.MYENV }"#,
                result: Some(vec![Value::from("my env value")]),
            },
            Example {
                description: "Set by primitive value list",
                example: r#"with-env [X Y W Z] { echo $nu.env.X $nu.env.W }"#,
                result: Some(vec![Value::from("Y"), Value::from("Z")]),
            },
            Example {
                description: "Set by single row table",
                example: r#"with-env [[X W]; [Y Z]] { echo $nu.env.X $nu.env.W }"#,
                result: Some(vec![Value::from("Y"), Value::from("Z")]),
            },
            Example {
                description: "Set by row (e.g. `open x.json` or `from json`)",
                example: r#"echo '{"X":"Y","W":"Z"}'|from json|with-env $it { echo $nu.env.X $nu.env.W }"#,
                result: None,
            },
        ]
    }
}

fn with_env(args: CommandArgs) -> Result<ActionStream, ShellError> {
    let external_redirection = args.call_info.args.external_redirection;
    let context = &args.context;
    let variable: Value = args.req(0)?;
    let block: CapturedBlock = args.req(1)?;

    let mut env = IndexMap::new();

    match &variable.value {
        UntaggedValue::Table(table) => {
            if table.len() == 1 {
                // single-row table ([[X W]; [Y Z]])
                for (k, v) in table[0].row_entries() {
                    env.insert(k.clone(), v.convert_to_string());
                }
            } else {
                // list of primitive values ([X Y W Z])
                for row in table.chunks(2) {
                    if row.len() == 2 && row[0].is_primitive() && row[1].is_primitive() {
                        env.insert(row[0].convert_to_string(), row[1].convert_to_string());
                    }
                }
            }
        }
        // a row, e.g. the object produced by `open x.json` or `from json`
        UntaggedValue::Row(row) => {
            for (k, v) in &row.entries {
                env.insert(k.clone(), v.convert_to_string());
            }
        }
        _ => {
            return Err(ShellError::type_error(
                "string list or single row",
                variable.spanned_type_name(),
            ));
        }
    };

    context.scope.enter_scope();
    context.scope.add_env(env);
    context.scope.add_vars(&block.captured.entries);

    let result = run_block(&block.block, context, args.input, external_redirection);
    context.scope.exit_scope();
#[cfg(test)] mod tests { use super::ShellError; use super::WithEnv; #[test] fn examples_work_as_expected() -> Result<(), ShellError> { use crate::examples::test as test_examples; test_examples(WithEnv {}) } }
result.map(|x| x.into_action_stream()) }
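with-env builds a temporary environment map, pushes a new scope, runs the captured block, and pops the scope so the variables never leak. A sketch of the same set-run-restore pattern in Go; withEnv is a hypothetical helper, and unlike Nushell's scoped environment, os.Setenv mutates process-global state, so this is not goroutine-safe:

// Run a block with temporary environment variables, restoring prior values after.
package main

import (
	"fmt"
	"os"
)

func withEnv(env map[string]string, block func()) {
	saved := make(map[string]*string, len(env))
	for k, v := range env {
		if old, ok := os.LookupEnv(k); ok {
			oldCopy := old
			saved[k] = &oldCopy
		} else {
			saved[k] = nil // the variable was unset beforehand
		}
		os.Setenv(k, v)
	}
	defer func() {
		// Restore or unset every variable we touched.
		for k, old := range saved {
			if old == nil {
				os.Unsetenv(k)
			} else {
				os.Setenv(k, *old)
			}
		}
	}()
	block()
}

func main() {
	withEnv(map[string]string{"MYENV": "my env value"}, func() {
		fmt.Println(os.Getenv("MYENV")) // my env value
	})
	fmt.Println(os.Getenv("MYENV") == "") // true: unset again afterwards
}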
bip85-validate-routing.module.ts
import { NgModule } from '@angular/core' import { Routes, RouterModule } from '@angular/router' import { Bip85ValidatePage } from './bip85-validate.page' const routes: Routes = [ { path: '', component: Bip85ValidatePage } ] @NgModule({ imports: [RouterModule.forChild(routes)],
exports: [RouterModule] }) export class Bip85ValidatePageRoutingModule {}
OrderHistory.js
import React from "react"; // import { Link } from 'react-router-dom'; import { useQuery } from "@apollo/client"; import { QUERY_USER } from "../utils/queries"; import { Container, Table, Row, Col } from "react-bootstrap"; function OrderHistory() { const { data } = useQuery(QUERY_USER); let user; if (data) { user = data.user; } return ( <Container> <Row className="mt-5 mb-5"> <Col> {user ? ( <> <p> Hello, {user.firstName} {user.lastName}. Thank you for shopping with us. </p> <h3>Order History</h3> {user.orders.map((order) => ( <div key={order._id}> {order.products.map( ({ _id, image, name, price, description }, index) => ( <div key={index}> <Table responsive bordered > <thead> <tr> <th>Order date</th> <th>Product image</th> <th>Product name</th> <th>Product description</th> <th>Price</th> </tr> </thead> <tbody> <tr> <td> {new Date( parseInt(order.purchaseDate) ).toLocaleDateString()} </td> <td> <img className="orderHistoryimg" alt={name} src={`/images/${image}`} /> </td> <td>{name}</td> <td>{description}</td>
</tr> </tbody> </Table> </div> ) )} </div> ))} </> ) : null} </Col> </Row> </Container> ); } export default OrderHistory;
<td>${price}</td>
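The component above renders order.purchaseDate, a millisecond Unix timestamp serialized as a string, via new Date(parseInt(...)).toLocaleDateString(). The equivalent conversion step sketched in Go; formatPurchaseDate is a hypothetical helper:

// Parse a millisecond-epoch string and format it as a short date.
package main

import (
	"fmt"
	"strconv"
	"time"
)

func formatPurchaseDate(ms string) (string, error) {
	n, err := strconv.ParseInt(ms, 10, 64)
	if err != nil {
		return "", err
	}
	// time.UnixMilli mirrors JavaScript's new Date(parseInt(ms)).
	return time.UnixMilli(n).Format("1/2/2006"), nil
}

func main() {
	s, _ := formatPurchaseDate("1622548800000")
	fmt.Println(s) // e.g. 6/1/2021 (exact date depends on the local time zone)
}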
scrabble_score.go
package scrabble

const testVersion = 5

// Score calculates the Scrabble score of a string
func Score(input string) int
{ result := 0 for _, v := range input { switch v { case 'A', 'E', 'I', 'O', 'U', 'L', 'N', 'R', 'S', 'T': fallthrough case 'a', 'e', 'i', 'o', 'u', 'l', 'n', 'r', 's', 't': result++ case 'D', 'G': fallthrough case 'd', 'g': result += 2 case 'B', 'C', 'M', 'P': fallthrough case 'b', 'c', 'm', 'p': result += 3 case 'F', 'H', 'V', 'W', 'Y': fallthrough case 'f', 'h', 'v', 'w', 'y': result += 4 case 'K': fallthrough case 'k': result += 5 case 'J', 'X': fallthrough case 'j', 'x': result += 8 case 'Q', 'Z': fallthrough case 'q', 'z': result += 10 } } return result }
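Score walks the string once and uses fallthrough so upper- and lower-case letters share a score bucket. A quick usage sketch, assuming the scrabble package above is importable; the module path is hypothetical:

package main

import (
	"fmt"

	"example.com/scrabble" // hypothetical import path for the package above
)

func main() {
	fmt.Println(scrabble.Score("cabbage")) // c=3 a=1 b=3 b=3 a=1 g=2 e=1 → 14
	fmt.Println(scrabble.Score("quiz"))    // q=10 u=1 i=1 z=10 → 22
}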
test_EOD_api.py
import os import re import datetime import unittest from io import StringIO from unittest.mock import patch import pandas as pd import EOD_api as eod TOKEN = os.environ["EOD_TOKEN"] def date_parser(string): date_pattern = re.compile("([0-9]{4}-[0-9]{2}-[0-9]{2})[ ]", re.VERBOSE) return date_pattern.sub(r"\1T", string) class TestGetEod(unittest.TestCase): # @classmethod # def setUp(cls): # pass # def tearDown(cls): # pass def test_idempotent__addtickers(self): d1 = eod.OhlcvIntraday( ["AAPL.US"], TOKEN, "2020-10-13", "2020-10-17", intraday_frec="5m" ).add_tickers(["MSFT.US"]) d2 = ( eod.OhlcvIntraday( ["AAPL.US"], TOKEN, "2020-10-13", "2020-10-17", intraday_frec="5m" ) .add_tickers(["MSFT.US"]) .add_tickers(["MSFT.US"]) ) self.assertEqual(d1, d2) def test_idempotent_truncate_dates(self): d1 = eod.Fundamental( ["AAPL.US"], TOKEN, "2020-10-13", "2020-10-17" ).truncate_dates("2020-10-14", "2020-10-16") d2 = ( eod.Fundamental(["AAPL.US"], TOKEN, "2020-10-13", "2020-10-17") .truncate_dates("2020-10-14", "2020-10-16") .truncate_dates("2020-10-14", "2020-10-16") ) self.assertEqual(d1, d2) def test_idempotent_remove_tickers(self):
def test_add_remove(self):
        d1 = eod.OhlcvIntraday(["AAPL.US"], TOKEN, "2020-10-13", "2020-10-17", "1m")
        d2 = (
            eod.OhlcvIntraday(["AAPL.US"], TOKEN, "2020-10-13", "2020-10-17", "1m")
            .add_tickers(["MSFT.US"])
            .remove_tickers(["MSFT.US"])
        )
        self.assertEqual(d1, d2)

    def test_remove_all_tickers(self):
        with self.assertRaises(Exception):
            eod.Ohlcv(["AAPL.US"], TOKEN, "2020-10-13", "2020-10-17").remove_tickers(
                ["AAPL.US"]
            ).retrieve_data()

    def test_misspelled_input(self):
        with self.assertRaises(Exception):
            eod.OhlcvIntraday(
                ["AAPL.US"], TOKEN, "2020-10-13", "2020-10-17", intraday_frec="Daoly"
            )

    def test_ohlcv_data_format_hasnt_changed(
        self,
    ):  # Change this from before formatting to after formatting
        expected_aapl = pd.read_csv(
            StringIO(
                """
                Date Open High Low Close Adjusted_close Volume
                2020-10-13 125.27 125.390 119.65 121.10 120.7110 262330500.0
                2020-10-14 121.00 123.030 119.62 121.19 120.8008 151062297.0
                2020-10-15 118.72 121.200 118.15 120.71 120.3223 112559203.0
                2020-10-16 121.28 121.548 118.81 119.02 118.6377 115393797.0
                275 NaN NaN NaN NaN NaN NaN
                """
            ),
            sep="\\s+",
        )
        url = "https://eodhistoricaldata.com/api/eod/AAPL.US?api_token={}&from=2020-10-13&to=2020-10-17&period=d".format(
            TOKEN
        )
        actual = pd.read_csv(
            url,
            usecols=[
                "Date",
                "Volume",
                "Open",
                "Close",
                "High",
                "Low",
                "Adjusted_close",
            ],
        )
        with patch.object(pd, "read_csv") as mock_read:
            mock_read.autospec = True
            mock_read.return_value = expected_aapl
            expected = pd.read_csv(
                url,
                usecols=[
                    "Date",
                    "Volume",
                    "Open",
                    "Close",
                    "High",
                    "Low",
                    "Adjusted_close",
                ],
            )
        pd.testing.assert_frame_equal(actual, expected, rtol=5e-3)

    def test_index_formatting(self):
        expected_aapl = pd.read_csv(
            StringIO(
                """
                Date Open High Low Close Adjusted_close Volume
                2020-10-13 125.27 125.390 119.65 121.10 120.7110 262330500.0
                2020-10-14 121.00 123.030 119.62 121.19 120.8008 151062297.0
                2020-10-15 118.72 121.200 118.15 120.71 120.3223 112559203.0
                2020-10-16 121.28 121.548 118.81 119.02 118.6377 115393797.0
                275 NaN NaN NaN NaN NaN NaN
                """
            ),
            sep="\\s+",
        )
        expected_aapl_formatted = pd.read_csv(
            StringIO(
                date_parser(
                    """
                    Stock Date Open High Low Close Adjusted_close Volume
                    AAPL.US 2020-10-13 00:00:00+00:00 125.27 125.390 119.65 121.10 120.7110 262330500.0
                    AAPL.US 2020-10-14 00:00:00+00:00 121.00 123.030 119.62 121.19 120.8008 151062297.0
                    AAPL.US 2020-10-15 00:00:00+00:00 118.72 121.200 118.15 120.71 120.3223 112559203.0
                    AAPL.US 2020-10-16 00:00:00+00:00 121.28 121.548 118.81 119.02 118.6377 115393797.0
                    """
                )
            ),
            sep="\\s+",
            index_col=[0, 1],
            converters={"Date": lambda col: datetime.datetime.fromisoformat(col)},
        )
        with patch.object(pd, "read_csv") as mock_read:
            mock_read.autospec = True
            mock_read.return_value = expected_aapl
            formatted_mock = eod.Ohlcv(
                ["AAPL.US"], TOKEN, "2020-10-13", "2020-10-17"
            ).retrieve_data()
            pd.testing.assert_frame_equal(
                formatted_mock, expected_aapl_formatted, rtol=5e-3
            )

    # TODO?
Write more tests: # Check that the data is concated/merged/joined properly, particularly when the indexes come with Nans # Check except clauses # Check duplicate df values # Assert errors with wrong args # etc # expected_ohlcv_concatted = pd.read_csv( StringIO( date_parser( """ # Stock Date Gmtoffset Datetime Open High Low Close Volume Returns # BP.LSE 2020-10-13 00:00:00+00:00 NaN NaN NaN NaN NaN NaN NaN NaN # BP.LSE 2020-10-14 00:00:00+00:00 0.0 2020-10-13 15:25:00 213.649993 214.000000 213.550003 213.856994 1210380.0 -0.001601 # BP.LSE 2020-10-15 00:00:00+00:00 0.0 2020-10-14 15:25:00 213.000000 213.149993 212.600006 212.649993 1182246.0 0.019660 # BP.LSE 2020-10-16 00:00:00+00:00 0.0 2020-10-15 15:25:00 207.149993 207.199996 206.500000 206.850006 1626720.0 -0.013826 # AAPL.US 2020-10-13 00:00:00+00:00 NaN NaN NaN NaN NaN NaN NaN NaN # AAPL.US 2020-10-14 00:00:00+00:00 0.0 2020-10-13 19:55:00 121.139999 121.279998 121.029998 121.050003 4585723.0 0.003648 # AAPL.US 2020-10-15 00:00:00+00:00 0.0 2020-10-14 19:55:00 121.580001 121.709999 121.139999 121.180000 3420583.0 0.015419 # AAPL.US 2020-10-16 00:00:00+00:00 0.0 2020-10-15 19:55:00 120.790000 120.849998 120.580001 120.699996 3436603.0 -0.003550 # MSFT.US 2020-10-13 00:00:00+00:00 NaN NaN NaN NaN NaN NaN NaN NaN # MSFT.US 2020-10-14 00:00:00+00:00 0.0 2020-10-13 19:55:00 223.320007 223.389999 222.750000 222.830001 1457493.0 0.000651 # MSFT.US 2020-10-15 00:00:00+00:00 0.0 2020-10-14 19:55:00 221.199996 221.414993 220.600006 220.759994 1122912.0 0.012377 # MSFT.US 2020-10-16 00:00:00+00:00 0.0 2020-10-15 19:55:00 219.639999 219.880004 219.490005 219.660003 1201342.0 -0.003900 # """ ) ), sep="\\s+", index_col=[0,1,2], converters = {'Date' : lambda col: datetime.datetime.fromisoformat( col ) \ # , 'Datetime' : lambda col: pd.to_datetime(col, format='%Y-%m-%dT%H:%M:%S', utc=True) } ) if __name__ == "__main__": unittest.main()
d1 = eod.Fundamental( ["AAPL.US", "MSFT.US"], TOKEN, "2020-10-13", "2020-10-17" ).remove_tickers(["MSFT.US"]) d2 = ( eod.Fundamental(["AAPL.US", "MSFT.US"], TOKEN, "2020-10-13", "2020-10-17") .remove_tickers(["MSFT.US"]) .remove_tickers(["MSFT.US"]) ) self.assertEqual(d1, d2)
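Several of the tests above assert idempotence: applying add_tickers, remove_tickers, or truncate_dates twice must equal applying it once. That property generalizes to f(f(x)) == f(x); a generic sketch of the check in Go (illustrative names, Go 1.18+ generics):

// Idempotence check: applying f once and twice must agree.
package main

import (
	"fmt"
	"reflect"
	"strings"
)

// assertIdempotent reports whether f(f(x)) equals f(x) for the given input.
func assertIdempotent[T any](f func(T) T, x T) bool {
	once := f(x)
	twice := f(f(x))
	return reflect.DeepEqual(once, twice)
}

func main() {
	upper := strings.ToUpper
	fmt.Println(assertIdempotent(upper, "aapl.us")) // true: ToUpper is idempotent
}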
commits_test.go
package git import ( "bytes" "context" "fmt" "strings" "testing" "time" "github.com/google/go-cmp/cmp" "github.com/stretchr/testify/require" "github.com/sourcegraph/sourcegraph/internal/actor" "github.com/sourcegraph/sourcegraph/internal/api" "github.com/sourcegraph/sourcegraph/internal/authz" "github.com/sourcegraph/sourcegraph/internal/database" "github.com/sourcegraph/sourcegraph/internal/gitserver" "github.com/sourcegraph/sourcegraph/internal/gitserver/gitdomain" "github.com/sourcegraph/sourcegraph/lib/errors" ) var ( fileWithAccess = "file-with-access" fileWithoutAccess = "file-without-access" ) func TestLogPartsPerCommitInSync(t *testing.T) { require.Equal(t, 2*partsPerCommitBasic, strings.Count(logFormatWithoutRefs, "%"), "Expected (2 * %0d) %% signs in log format string (%0d fields, %0d %%x00 separators)", partsPerCommitBasic) } func TestRepository_GetCommit(t *testing.T) { ctx := actor.WithActor(context.Background(), &actor.Actor{ UID: 1, }) db := database.NewMockDB() gitCommands := []string{ "GIT_COMMITTER_NAME=a [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:05Z git commit --allow-empty -m foo --author='a <[email protected]>' --date 2006-01-02T15:04:05Z", "GIT_COMMITTER_NAME=c [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:07Z git commit --allow-empty -m bar --author='a <[email protected]>' --date 2006-01-02T15:04:06Z", } gitCommandsWithFiles := getGitCommandsWithFiles(fileWithAccess, fileWithoutAccess) oldRunCommitLog := runCommitLog type testCase struct { repo api.RepoName id api.CommitID wantCommit *gitdomain.Commit noEnsureRevision bool revisionNotFoundError bool } runGetCommitTests := func(checker authz.SubRepoPermissionChecker, tests map[string]testCase) { for label, test := range tests { t.Run(label, func(t *testing.T) { var noEnsureRevision bool t.Cleanup(func() { runCommitLog = oldRunCommitLog }) runCommitLog = func(ctx context.Context, cmd *gitserver.Cmd, opt CommitsOptions) ([]*wrappedCommit, error) { // Track the value of NoEnsureRevision we pass to gitserver noEnsureRevision = opt.NoEnsureRevision return oldRunCommitLog(ctx, cmd, opt) } resolveRevisionOptions := ResolveRevisionOptions{ NoEnsureRevision: test.noEnsureRevision, } commit, err := GetCommit(ctx, db, test.repo, test.id, resolveRevisionOptions, checker) if err != nil { if test.revisionNotFoundError { if !errors.HasType(err, &gitdomain.RevisionNotFoundError{}) { t.Errorf("%s: GetCommit: expected a RevisionNotFoundError, got %s", label, err) } return } t.Errorf("%s: GetCommit: %s", label, err) } if !CommitsEqual(commit, test.wantCommit) { t.Errorf("%s: got commit == %+v, want %+v", label, commit, test.wantCommit) return } // Test that trying to get a nonexistent commit returns RevisionNotFoundError. 
if _, err := GetCommit(ctx, db, test.repo, NonExistentCommitID, resolveRevisionOptions, checker); !errors.HasType(err, &gitdomain.RevisionNotFoundError{}) { t.Errorf("%s: for nonexistent commit: got err %v, want RevisionNotFoundError", label, err) } if noEnsureRevision != test.noEnsureRevision { t.Fatalf("Expected %t, got %t", test.noEnsureRevision, noEnsureRevision) } }) } } wantGitCommit := &gitdomain.Commit{ ID: "b266c7e3ca00b1a17ad0b1449825d0854225c007", Author: gitdomain.Signature{Name: "a", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:06Z")}, Committer: &gitdomain.Signature{Name: "c", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:07Z")}, Message: "bar", Parents: []api.CommitID{"ea167fe3d76b1e5fd3ed8ca44cbd2fe3897684f8"}, } tests := map[string]testCase{ "git cmd with NoEnsureRevision false": { repo: MakeGitRepository(t, gitCommands...), id: "b266c7e3ca00b1a17ad0b1449825d0854225c007", wantCommit: wantGitCommit, noEnsureRevision: false, }, "git cmd with NoEnsureRevision true": { repo: MakeGitRepository(t, gitCommands...), id: "b266c7e3ca00b1a17ad0b1449825d0854225c007", wantCommit: wantGitCommit, noEnsureRevision: true, }, } // Run basic tests w/o sub-repo permissions checker runGetCommitTests(nil, tests) checker := getTestSubRepoPermsChecker(fileWithoutAccess) // Add test cases with file names for sub-repo permissions testing tests["with sub-repo permissions and access to file"] = testCase{ repo: MakeGitRepository(t, gitCommandsWithFiles...), id: "da50eed82c8ff3c17bb642000d8aad9d434283c1", wantCommit: &gitdomain.Commit{ ID: "da50eed82c8ff3c17bb642000d8aad9d434283c1", Author: gitdomain.Signature{Name: "a", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:05Z")}, Committer: &gitdomain.Signature{Name: "a", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:05Z")}, Message: "commit1", }, noEnsureRevision: true, } tests["with sub-repo permissions and NO access to file"] = testCase{ repo: MakeGitRepository(t, gitCommandsWithFiles...), id: "ee7773505e98390e809cbf518b2a92e4748b0187", wantCommit: &gitdomain.Commit{}, noEnsureRevision: true, revisionNotFoundError: true, } // Run test w/ sub-repo permissions filtering runGetCommitTests(checker, tests) } func TestRepository_HasCommitAfter(t *testing.T) { t.Parallel() ctx := actor.WithActor(context.Background(), &actor.Actor{ UID: 1, }) db := database.NewMockDB() testCases := []struct { label string commitDates []string after string revspec string want, wantSubRepoTest bool }{ { label: "after specific date", commitDates: []string{ "2006-01-02T15:04:05Z", "2007-01-02T15:04:05Z", "2008-01-02T15:04:05Z", }, after: "2006-01-02T15:04:05Z", revspec: "master", want: true, wantSubRepoTest: true, }, { label: "after 1 year ago", commitDates: []string{ "2016-01-02T15:04:05Z", "2017-01-02T15:04:05Z", "2017-01-02T15:04:06Z", }, after: "1 year ago", revspec: "master", want: false, wantSubRepoTest: false, }, { label: "after too recent date", commitDates: []string{ "2006-01-02T15:04:05Z", "2007-01-02T15:04:05Z", "2008-01-02T15:04:05Z", }, after: "2010-01-02T15:04:05Z", revspec: "HEAD", want: false, wantSubRepoTest: false, }, { label: "commit 1 second after", commitDates: []string{ "2006-01-02T15:04:05Z", "2007-01-02T15:04:05Z", "2007-01-02T15:04:06Z", }, after: "2007-01-02T15:04:05Z", revspec: "HEAD", want: true, wantSubRepoTest: false, }, { label: "after 10 years ago", commitDates: []string{ "2016-01-02T15:04:05Z", 
"2017-01-02T15:04:05Z", "2017-01-02T15:04:06Z", }, after: "10 years ago", revspec: "HEAD", want: true, wantSubRepoTest: true, }, } t.Run("basic", func(t *testing.T) { for _, tc := range testCases { t.Run(tc.label, func(t *testing.T) { gitCommands := make([]string, len(tc.commitDates)) for i, date := range tc.commitDates { gitCommands[i] = fmt.Sprintf("GIT_COMMITTER_NAME=a [email protected] GIT_COMMITTER_DATE=%s git commit --allow-empty -m foo --author='a <[email protected]>'", date) } repo := MakeGitRepository(t, gitCommands...) got, err := HasCommitAfter(ctx, db, repo, tc.after, tc.revspec, nil) if err != nil || got != tc.want { t.Errorf("got %t hascommitafter, want %t", got, tc.want) } }) } }) t.Run("with sub-repo permissions", func(t *testing.T) { for _, tc := range testCases { t.Run(tc.label, func(t *testing.T) { gitCommands := make([]string, len(tc.commitDates)) for i, date := range tc.commitDates { fileName := fmt.Sprintf("file%d", i) gitCommands = append(gitCommands, fmt.Sprintf("touch %s", fileName), fmt.Sprintf("git add %s", fileName)) gitCommands = append(gitCommands, fmt.Sprintf("GIT_COMMITTER_NAME=a [email protected] GIT_COMMITTER_DATE=%s git commit -m commit%d --author='a <[email protected]>'", date, i)) } // Case where user can't view commit 2, but can view commits 0 and 1. In each test case the result should match the case where no sub-repo perms enabled checker := getTestSubRepoPermsChecker("file2") repo := MakeGitRepository(t, gitCommands...) got, err := HasCommitAfter(ctx, db, repo, tc.after, tc.revspec, checker) if err != nil { t.Errorf("got error: %s", err) } if got != tc.want { t.Errorf("got %t hascommitafter, want %t", got, tc.want) } // Case where user can't view commit 1 or commit 2, which will mean in some cases since HasCommitAfter will be false due to those commits not being visible. checker = getTestSubRepoPermsChecker("file1", "file2") repo = MakeGitRepository(t, gitCommands...) got, err = HasCommitAfter(ctx, db, repo, tc.after, tc.revspec, checker) if err != nil { t.Errorf("got error: %s", err) } if got != tc.wantSubRepoTest { t.Errorf("got %t hascommitafter, want %t", got, tc.wantSubRepoTest) } }) } }) } func TestRepository_FirstEverCommit(t *testing.T) { t.Parallel() ctx := actor.WithActor(context.Background(), &actor.Actor{ UID: 1, }) db := database.NewMockDB() testCases := []struct { commitDates []string want string }{ { commitDates: []string{ "2006-01-02T15:04:05Z", "2007-01-02T15:04:05Z", "2008-01-02T15:04:05Z", }, want: "2006-01-02T15:04:05Z", }, { commitDates: []string{ "2007-01-02T15:04:05Z", // Don't think this is possible, but if it is we still want the first commit (not strictly "oldest") "2006-01-02T15:04:05Z", "2007-01-02T15:04:06Z", }, want: "2007-01-02T15:04:05Z", }, } t.Run("basic", func(t *testing.T) { for _, tc := range testCases { gitCommands := make([]string, len(tc.commitDates)) for i, date := range tc.commitDates { gitCommands[i] = fmt.Sprintf("GIT_COMMITTER_NAME=a [email protected] GIT_COMMITTER_DATE=%s git commit --allow-empty -m foo --author='a <[email protected]>'", date) } repo := MakeGitRepository(t, gitCommands...) 
gotCommit, err := FirstEverCommit(ctx, db, repo, nil) if err != nil { t.Fatal(err) } got := gotCommit.Committer.Date.Format(time.RFC3339) if got != tc.want { t.Errorf("got %q, want %q", got, tc.want) } } }) t.Run("with sub-repo permissions", func(t *testing.T) { checkerWithoutAccessFirstCommit := getTestSubRepoPermsChecker("file0") checkerWithAccessFirstCommit := getTestSubRepoPermsChecker("file1") for _, tc := range testCases { gitCommands := make([]string, 0, len(tc.commitDates)) for i, date := range tc.commitDates { fileName := fmt.Sprintf("file%d", i) gitCommands = append(gitCommands, fmt.Sprintf("touch %s", fileName)) gitCommands = append(gitCommands, fmt.Sprintf("git add %s", fileName)) gitCommands = append(gitCommands, fmt.Sprintf("GIT_COMMITTER_NAME=a [email protected] GIT_COMMITTER_DATE=%s git commit -m foo --author='a <[email protected]>'", date)) } repo := MakeGitRepository(t, gitCommands...) // Try to get first commit when user doesn't have permission to view _, err := FirstEverCommit(ctx, db, repo, checkerWithoutAccessFirstCommit) if !errors.HasType(err, &gitdomain.RevisionNotFoundError{}) { t.Errorf("expected a RevisionNotFoundError since the user does not have access to view this commit, got :%s", err) } // Try to get first commit when user does have permission to view, should succeed gotCommit, err := FirstEverCommit(ctx, db, repo, checkerWithAccessFirstCommit) if err != nil { t.Fatal(err) } got := gotCommit.Committer.Date.Format(time.RFC3339) if got != tc.want { t.Errorf("got %q, want %q", got, tc.want) } // Internal actor should always have access and ignore sub-repo permissions newCtx := actor.WithActor(context.Background(), &actor.Actor{ UID: 1, Internal: true, }) gotCommit, err = FirstEverCommit(newCtx, db, repo, checkerWithoutAccessFirstCommit) if err != nil { t.Fatal(err) } got = gotCommit.Committer.Date.Format(time.RFC3339) if got != tc.want { t.Errorf("got %q, want %q", got, tc.want) } } }) } func TestHead(t *testing.T) { t.Parallel() t.Run("basic", func(t *testing.T) { gitCommands := []string{ "GIT_COMMITTER_NAME=a [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:05Z git commit --allow-empty -m foo --author='a <[email protected]>' --date 2006-01-02T15:04:05Z", } repo := MakeGitRepository(t, gitCommands...) ctx := context.Background() head, exists, err := Head(ctx, database.NewMockDB(), repo, nil) if err != nil { t.Fatal(err) } wantHead := "ea167fe3d76b1e5fd3ed8ca44cbd2fe3897684f8" if head != wantHead { t.Fatalf("Want %q, got %q", wantHead, head) } if !exists { t.Fatal("Should exist") } }) t.Run("with sub-repo permissions", func(t *testing.T) { db := database.NewMockDB() gitCommands := []string{ "touch file", "git add file", "GIT_COMMITTER_NAME=a [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:05Z git commit -m foo --author='a <[email protected]>' --date 2006-01-02T15:04:05Z", } repo := MakeGitRepository(t, gitCommands...) 
ctx := actor.WithActor(context.Background(), &actor.Actor{ UID: 1, }) checker := getTestSubRepoPermsChecker("file") // call Head() when user doesn't have access to view the commit _, exists, err := Head(ctx, db, repo, checker) if err != nil { t.Fatal(err) } if exists { t.Fatalf("exists should be false since the user doesn't have access to view the commit") } readAllChecker := getTestSubRepoPermsChecker() // call Head() when user has access to view the commit; should return expected commit head, exists, err := Head(ctx, db, repo, readAllChecker) if err != nil { t.Fatal(err) } wantHead := "46619ad353dbe4ed4108ebde9aa59ef676994a0b" if head != wantHead { t.Fatalf("Want %q, got %q", wantHead, head) } if !exists { t.Fatal("Should exist") } }) } func TestCommitExists(t *testing.T) { t.Parallel() ctx := actor.WithActor(context.Background(), &actor.Actor{ UID: 1, }) db := database.NewMockDB() testCommitExists := func(label string, gitCommands []string, commitID, nonExistentCommitID api.CommitID, checker authz.SubRepoPermissionChecker) { t.Run(label, func(t *testing.T) { repo := MakeGitRepository(t, gitCommands...) exists, err := CommitExists(ctx, db, repo, commitID, checker) if err != nil { t.Fatal(err) } if !exists { t.Fatal("Should exist") } exists, err = CommitExists(ctx, db, repo, nonExistentCommitID, checker) if err != nil { t.Fatal(err) } if exists { t.Fatal("Should not exist") } }) } gitCommands := []string{ "GIT_COMMITTER_NAME=a [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:05Z git commit --allow-empty -m foo --author='a <[email protected]>' --date 2006-01-02T15:04:05Z", } testCommitExists("basic", gitCommands, "ea167fe3d76b1e5fd3ed8ca44cbd2fe3897684f8", NonExistentCommitID, nil) gitCommandsWithFiles := getGitCommandsWithFiles(fileWithAccess, fileWithoutAccess) commitIDWithAccess := api.CommitID("da50eed82c8ff3c17bb642000d8aad9d434283c1") commitIDWithoutAccess := api.CommitID("ee7773505e98390e809cbf518b2a92e4748b0187") // Test that the commit ID the user has access to exists, and CommitExists returns false for the commit ID the user // doesn't have access to (since a file was modified in the commit that the user doesn't have permissions to view) testCommitExists("with sub-repo permissions filtering", gitCommandsWithFiles, commitIDWithAccess, commitIDWithoutAccess, getTestSubRepoPermsChecker(fileWithoutAccess)) } func TestRepository_Commits(t *testing.T) { t.Parallel() ctx := actor.WithActor(context.Background(), &actor.Actor{ UID: 1, }) // TODO(sqs): test CommitsOptions.Base gitCommands := []string{ "GIT_COMMITTER_NAME=a [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:05Z git commit --allow-empty -m foo --author='a <[email protected]>' --date 2006-01-02T15:04:05Z", "GIT_COMMITTER_NAME=c [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:07Z git commit --allow-empty -m bar --author='a <[email protected]>' --date 2006-01-02T15:04:06Z", } wantGitCommits := []*gitdomain.Commit{ { ID: "b266c7e3ca00b1a17ad0b1449825d0854225c007", Author: gitdomain.Signature{Name: "a", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:06Z")}, Committer: &gitdomain.Signature{Name: "c", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:07Z")}, Message: "bar", Parents: []api.CommitID{"ea167fe3d76b1e5fd3ed8ca44cbd2fe3897684f8"}, }, { ID: "ea167fe3d76b1e5fd3ed8ca44cbd2fe3897684f8", Author: gitdomain.Signature{Name: "a", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:05Z")}, Committer: &gitdomain.Signature{Name: 
"a", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:05Z")}, Message: "foo", Parents: nil, }, } tests := map[string]struct { repo api.RepoName id api.CommitID wantCommits []*gitdomain.Commit wantTotal uint }{ "git cmd": { repo: MakeGitRepository(t, gitCommands...), id: "b266c7e3ca00b1a17ad0b1449825d0854225c007", wantCommits: wantGitCommits, wantTotal: 2, }, } runCommitsTests := func(checker authz.SubRepoPermissionChecker) { for label, test := range tests { t.Run(label, func(t *testing.T) { testCommits(ctx, label, test.repo, CommitsOptions{Range: string(test.id)}, checker, test.wantTotal, test.wantCommits, t) // Test that trying to get a nonexistent commit returns RevisionNotFoundError. if _, err := Commits(ctx, database.NewMockDB(), test.repo, CommitsOptions{Range: string(NonExistentCommitID)}, nil); !errors.HasType(err, &gitdomain.RevisionNotFoundError{}) { t.Errorf("%s: for nonexistent commit: got err %v, want RevisionNotFoundError", label, err) } }) } } runCommitsTests(nil) checker := getTestSubRepoPermsChecker() runCommitsTests(checker) } func TestCommits_SubRepoPerms(t *testing.T) { t.Parallel() ctx := actor.WithActor(context.Background(), &actor.Actor{ UID: 1, }) gitCommands := []string{ "touch file1", "git add file1", "GIT_COMMITTER_NAME=a [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:05Z git commit -m commit1 --author='a <[email protected]>' --date 2006-01-02T15:04:05Z", "touch file2", "git add file2", "touch file2.2", "git add file2.2", "GIT_COMMITTER_NAME=c [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:07Z git commit -m commit2 --author='a <[email protected]>' --date 2006-01-02T15:04:06Z", "touch file3", "git add file3", "GIT_COMMITTER_NAME=c [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:07Z git commit -m commit3 --author='a <[email protected]>' --date 2006-01-02T15:04:07Z", } tests := map[string]struct { repo api.RepoName wantCommits []*gitdomain.Commit opt CommitsOptions wantTotal uint noAccessPaths []string }{ "if no read perms on file should filter out commit": { repo: MakeGitRepository(t, gitCommands...), wantTotal: 1, wantCommits: []*gitdomain.Commit{ { ID: "d38233a79e037d2ab8170b0d0bc0aa438473e6da", Author: gitdomain.Signature{Name: "a", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:05Z")}, Committer: &gitdomain.Signature{Name: "a", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:05Z")}, Message: "commit1", }, }, noAccessPaths: []string{"file2", "file3"}, }, "sub-repo perms with path (w/ no access) specified should return no commits": { repo: MakeGitRepository(t, gitCommands...), wantTotal: 1, opt: CommitsOptions{ Path: "file2", }, wantCommits: []*gitdomain.Commit{}, noAccessPaths: []string{"file2", "file3"}, }, "sub-repo perms with path (w/ access) specified should return that commit": { repo: MakeGitRepository(t, gitCommands...), wantTotal: 1, opt: CommitsOptions{ Path: "file1", }, wantCommits: []*gitdomain.Commit{ { ID: "d38233a79e037d2ab8170b0d0bc0aa438473e6da", Author: gitdomain.Signature{Name: "a", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:05Z")}, Committer: &gitdomain.Signature{Name: "a", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:05Z")}, Message: "commit1", }, }, noAccessPaths: []string{"file2", "file3"}, }, } for label, test := range tests { t.Run(label, func(t *testing.T) { checker := getTestSubRepoPermsChecker(test.noAccessPaths...) 
commits, err := Commits(ctx, database.NewMockDB(), test.repo, test.opt, checker) if err != nil { t.Errorf("%s: Commits(): %s", label, err) return } if len(commits) != len(test.wantCommits) { t.Errorf("%s: got %d commits, want %d", label, len(commits), len(test.wantCommits)) } checkCommits(t, label, commits, test.wantCommits) }) } } func TestCommits_SubRepoPerms_ReturnNCommits(t *testing.T) { t.Parallel() ctx := actor.WithActor(context.Background(), &actor.Actor{ UID: 1, }) gitCommands := []string{ "touch file1", "git add file1", "GIT_COMMITTER_NAME=c [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:01Z git commit -m commit1 --author='a <[email protected]>' --date 2006-01-02T15:04:01Z", "touch file2", "git add file2", "GIT_COMMITTER_NAME=c [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:02Z git commit -m commit2 --author='a <[email protected]>' --date 2006-01-02T15:04:02Z", "echo foo > file1", "git add file1", "GIT_COMMITTER_NAME=c [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:03Z git commit -m commit3 --author='a <[email protected]>' --date 2006-01-02T15:04:03Z", "echo asdf > file1", "git add file1", "GIT_COMMITTER_NAME=c [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:04Z git commit -m commit4 --author='a <[email protected]>' --date 2006-01-02T15:04:04Z", "echo bar > file1", "git add file1", "GIT_COMMITTER_NAME=c [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:05Z git commit -m commit5 --author='a <[email protected]>' --date 2006-01-02T15:04:05Z", "echo asdf2 > file2", "git add file2", "GIT_COMMITTER_NAME=c [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:06Z git commit -m commit6 --author='a <[email protected]>' --date 2006-01-02T15:04:06Z", "echo bazz > file1", "git add file1", "GIT_COMMITTER_NAME=c [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:07Z git commit -m commit7 --author='a <[email protected]>' --date 2006-01-02T15:04:07Z", "echo bazz > file2", "git add file2", "GIT_COMMITTER_NAME=c [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:08Z git commit -m commit8 --author='a <[email protected]>' --date 2006-01-02T15:04:08Z", } tests := map[string]struct { repo api.RepoName wantCommits []*gitdomain.Commit opt CommitsOptions wantTotal uint noAccessPaths []string }{ "return the requested number of commits": { repo: MakeGitRepository(t, gitCommands...), wantTotal: 3, opt: CommitsOptions{ N: 3, }, wantCommits: []*gitdomain.Commit{ { ID: "61dbc35f719c53810904a2d359309d4e1e98a6be", Author: gitdomain.Signature{Name: "a", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:07Z")}, Committer: &gitdomain.Signature{Name: "c", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:07Z")}, Message: "commit7", Parents: []api.CommitID{"66566c8aa223f3e1b94ebe09e6cdb14c3a5bfb36"}, }, { ID: "2e6b2c94293e9e339f781b2a2f7172e15460f88c", Author: gitdomain.Signature{Name: "a", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:05Z")}, Committer: &gitdomain.Signature{Name: "c", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:05Z")}, Parents: []api.CommitID{ "9a7ec70986d657c4c86d6ac476f0c5181ece509a", }, Message: "commit5", }, { ID: "9a7ec70986d657c4c86d6ac476f0c5181ece509a", Author: gitdomain.Signature{Name: "a", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:04Z")}, Committer: &gitdomain.Signature{Name: "c", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:04Z")}, Message: "commit4", 
Parents: []api.CommitID{ "f3fa8cf6ec56d0469402523385d6ca4b7cb222d8", }, }, }, noAccessPaths: []string{"file2"}, }, } for label, test := range tests { t.Run(label, func(t *testing.T) { checker := getTestSubRepoPermsChecker(test.noAccessPaths...) commits, err := Commits(ctx, database.NewMockDB(), test.repo, test.opt, checker) if err != nil { t.Errorf("%s: Commits(): %s", label, err) return } if diff := cmp.Diff(test.wantCommits, commits); diff != "" { t.Fatal(diff) } }) } } func TestRepository_Commits_options(t *testing.T) { t.Parallel() ctx := context.Background() gitCommands := []string{ "GIT_COMMITTER_NAME=a [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:05Z git commit --allow-empty -m foo --author='a <[email protected]>' --date 2006-01-02T15:04:05Z", "GIT_COMMITTER_NAME=c [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:07Z git commit --allow-empty -m bar --author='a <[email protected]>' --date 2006-01-02T15:04:06Z", "GIT_COMMITTER_NAME=c [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:08Z git commit --allow-empty -m qux --author='a <[email protected]>' --date 2006-01-02T15:04:08Z", } wantGitCommits := []*gitdomain.Commit{ { ID: "b266c7e3ca00b1a17ad0b1449825d0854225c007", Author: gitdomain.Signature{Name: "a", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:06Z")}, Committer: &gitdomain.Signature{Name: "c", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:07Z")}, Message: "bar", Parents: []api.CommitID{"ea167fe3d76b1e5fd3ed8ca44cbd2fe3897684f8"}, }, } wantGitCommits2 := []*gitdomain.Commit{ { ID: "ade564eba4cf904492fb56dcd287ac633e6e082c", Author: gitdomain.Signature{Name: "a", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:08Z")}, Committer: &gitdomain.Signature{Name: "c", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:08Z")}, Message: "qux", Parents: []api.CommitID{"b266c7e3ca00b1a17ad0b1449825d0854225c007"}, }, } tests := map[string]struct { repo api.RepoName opt CommitsOptions wantCommits []*gitdomain.Commit wantTotal uint }{ "git cmd": { repo: MakeGitRepository(t, gitCommands...), opt: CommitsOptions{Range: "ade564eba4cf904492fb56dcd287ac633e6e082c", N: 1, Skip: 1}, wantCommits: wantGitCommits, wantTotal: 1, }, "git cmd Head": { repo: MakeGitRepository(t, gitCommands...), opt: CommitsOptions{ Range: "b266c7e3ca00b1a17ad0b1449825d0854225c007...ade564eba4cf904492fb56dcd287ac633e6e082c", }, wantCommits: wantGitCommits2, wantTotal: 1, }, "before": { repo: MakeGitRepository(t, gitCommands...), opt: CommitsOptions{ Before: "2006-01-02T15:04:07Z", Range: "HEAD", N: 1, }, wantCommits: []*gitdomain.Commit{ { ID: "b266c7e3ca00b1a17ad0b1449825d0854225c007", Author: gitdomain.Signature{Name: "a", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:06Z")}, Committer: &gitdomain.Signature{Name: "c", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:07Z")}, Message: "bar", Parents: []api.CommitID{"ea167fe3d76b1e5fd3ed8ca44cbd2fe3897684f8"}, }, }, wantTotal: 1, }, } runCommitsTests := func(checker authz.SubRepoPermissionChecker) { for label, test := range tests { t.Run(label, func(t *testing.T) { testCommits(ctx, label, test.repo, test.opt, checker, test.wantTotal, test.wantCommits, t) }) } } runCommitsTests(nil) checker := getTestSubRepoPermsChecker() runCommitsTests(checker) } func TestRepository_Commits_options_path(t *testing.T) { t.Parallel() ctx := actor.WithActor(context.Background(), 
&actor.Actor{ UID: 1, }) gitCommands := []string{ "GIT_COMMITTER_NAME=a [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:05Z git commit --allow-empty -m commit1 --author='a <[email protected]>' --date 2006-01-02T15:04:05Z", "touch file1", "touch --date=2006-01-02T15:04:05Z file1 || touch -t " + Times[0] + " file1", "git add file1", "GIT_COMMITTER_NAME=a [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:05Z git commit -m commit2 --author='a <[email protected]>' --date 2006-01-02T15:04:05Z", "GIT_COMMITTER_NAME=c [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:07Z git commit --allow-empty -m commit3 --author='a <[email protected]>' --date 2006-01-02T15:04:06Z", } wantGitCommits := []*gitdomain.Commit{ { ID: "546a3ef26e581624ef997cb8c0ba01ee475fc1dc", Author: gitdomain.Signature{Name: "a", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:05Z")}, Committer: &gitdomain.Signature{Name: "a", Email: "[email protected]", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:05Z")}, Message: "commit2", Parents: []api.CommitID{"a04652fa1998a0a7d2f2f77ecb7021de943d3aab"}, }, } tests := map[string]struct { repo api.RepoName opt CommitsOptions wantCommits []*gitdomain.Commit wantTotal uint }{ "git cmd Path 0": { repo: MakeGitRepository(t, gitCommands...), opt: CommitsOptions{ Range: "master", Path: "doesnt-exist", }, wantCommits: nil, wantTotal: 0, }, "git cmd Path 1": { repo: MakeGitRepository(t, gitCommands...), opt: CommitsOptions{ Range: "master", Path: "file1", }, wantCommits: wantGitCommits, wantTotal: 1, }, } runCommitsTest := func(checker authz.SubRepoPermissionChecker) { for label, test := range tests { t.Run(label, func(t *testing.T) { testCommits(ctx, label, test.repo, test.opt, checker, test.wantTotal, test.wantCommits, t) }) } } runCommitsTest(nil) checker := getTestSubRepoPermsChecker() runCommitsTest(checker) } func TestMessage(t *testing.T)
func TestParseCommitsUniqueToBranch(t *testing.T) { commits, err := parseCommitsUniqueToBranch([]string{ "c165bfff52e9d4f87891bba497e3b70fea144d89:2020-08-04T08:23:30-05:00", "f73ee8ed601efea74f3b734eeb073307e1615606:2020-04-16T16:06:21-04:00", "6057f7ed8d331c82030c713b650fc8fd2c0c2347:2020-04-16T16:20:26-04:00", "7886287b8758d1baf19cf7b8253856128369a2a7:2020-04-16T16:55:58-04:00", "b69f89473bbcc04dc52cafaf6baa504e34791f5a:2020-04-20T12:10:49-04:00", "172b7fcf8b8c49b37b231693433586c2bfd1619e:2020-04-20T12:37:36-04:00", "5bc35c78fb5fb388891ca944cd12d85fd6dede95:2020-05-05T12:53:18-05:00", }) if err != nil { t.Fatalf("unexpected error parsing commits: %s", err) } expectedCommits := map[string]time.Time{ "c165bfff52e9d4f87891bba497e3b70fea144d89": *mustParseDate("2020-08-04T08:23:30-05:00", t), "f73ee8ed601efea74f3b734eeb073307e1615606": *mustParseDate("2020-04-16T16:06:21-04:00", t), "6057f7ed8d331c82030c713b650fc8fd2c0c2347": *mustParseDate("2020-04-16T16:20:26-04:00", t), "7886287b8758d1baf19cf7b8253856128369a2a7": *mustParseDate("2020-04-16T16:55:58-04:00", t), "b69f89473bbcc04dc52cafaf6baa504e34791f5a": *mustParseDate("2020-04-20T12:10:49-04:00", t), "172b7fcf8b8c49b37b231693433586c2bfd1619e": *mustParseDate("2020-04-20T12:37:36-04:00", t), "5bc35c78fb5fb388891ca944cd12d85fd6dede95": *mustParseDate("2020-05-05T12:53:18-05:00", t), } if diff := cmp.Diff(expectedCommits, commits); diff != "" { t.Errorf("unexpected commits (-want +got):\n%s", diff) } } func TestParseBranchesContaining(t *testing.T) { names := parseBranchesContaining([]string{ "refs/tags/v0.7.0", "refs/tags/v0.5.1", "refs/tags/v1.1.4", "refs/heads/symbols", "refs/heads/bl/symbols", "refs/tags/v1.2.0", "refs/tags/v1.1.0", "refs/tags/v0.10.0", "refs/tags/v1.0.0", "refs/heads/garo/index-specific-files", "refs/heads/bl/symbols-2", "refs/tags/v1.3.1", "refs/tags/v0.5.2", "refs/tags/v1.1.2", "refs/tags/v0.8.0", "refs/heads/ef/wtf", "refs/tags/v1.5.0", "refs/tags/v0.9.0", "refs/heads/garo/go-and-typescript-lsif-indexing", "refs/heads/master", "refs/heads/sg/document-symbols", "refs/tags/v1.1.1", "refs/tags/v1.4.0", "refs/heads/nsc/bump-go-version", "refs/heads/nsc/random", "refs/heads/nsc/markupcontent", "refs/tags/v0.6.0", "refs/tags/v1.1.3", "refs/tags/v0.5.3", "refs/tags/v1.3.0", }) expectedNames := []string{ "bl/symbols", "bl/symbols-2", "ef/wtf", "garo/go-and-typescript-lsif-indexing", "garo/index-specific-files", "master", "nsc/bump-go-version", "nsc/markupcontent", "nsc/random", "sg/document-symbols", "symbols", "v0.10.0", "v0.5.1", "v0.5.2", "v0.5.3", "v0.6.0", "v0.7.0", "v0.8.0", "v0.9.0", "v1.0.0", "v1.1.0", "v1.1.1", "v1.1.2", "v1.1.3", "v1.1.4", "v1.2.0", "v1.3.0", "v1.3.1", "v1.4.0", "v1.5.0", } if diff := cmp.Diff(expectedNames, names); diff != "" { t.Errorf("unexpected names (-want +got):\n%s", diff) } } func TestParseRefDescriptions(t *testing.T) { refDescriptions, err := parseRefDescriptions(bytes.Join([][]byte{ []byte("66a7ac584740245fc523da443a3f540a52f8af72\x00refs/heads/bl/symbols\x00 \x002021-01-18T16:46:51-08:00"), []byte("58537c06cf7ba8a562a3f5208fb7a8efbc971d0e\x00refs/heads/bl/symbols-2\x00 \x002021-02-24T06:21:20-08:00"), []byte("a40716031ae97ee7c5cdf1dec913567a4a7c50c8\x00refs/heads/ef/wtf\x00 \x002021-02-10T10:50:08-06:00"), []byte("e2e283fdaf6ea4a419cdbad142bbfd4b730080f8\x00refs/heads/garo/go-and-typescript-lsif-indexing\x00 \x002020-04-29T16:45:46+00:00"), []byte("c485d92c3d2065041bf29b3fe0b55ffac7e66b2a\x00refs/heads/garo/index-specific-files\x00 \x002021-03-01T13:09:42-08:00"), 
[]byte("ce30aee6cc56f39d0ac6fee03c4c151c08a8cd2e\x00refs/heads/master\x00*\x002021-06-16T11:51:09-07:00"), []byte("ec5cfc8ab33370c698273b1a097af73ea289c92b\x00refs/heads/nsc/bump-go-version\x00 \x002021-03-12T22:33:17+00:00"), []byte("22b2c4f734f62060cae69da856fe3854defdcc87\x00refs/heads/nsc/markupcontent\x00 \x002021-05-03T23:50:02+01:00"), []byte("9df3358a18792fa9dbd40d506f2e0ad23fc11ee8\x00refs/heads/nsc/random\x00 \x002021-02-10T16:29:06+00:00"), []byte("a02b85b63345a1406d7a19727f7a5472c976e053\x00refs/heads/sg/document-symbols\x00 \x002021-04-08T15:33:03-07:00"), []byte("234b0a484519129b251164ecb0674ec27d154d2f\x00refs/heads/symbols\x00 \x002021-01-01T22:51:55-08:00"), []byte("6b5ae2e0ce568a7641174072271d109d7d0977c7\x00refs/tags/v0.0.0\x00 \x00"), []byte("c165bfff52e9d4f87891bba497e3b70fea144d89\x00refs/tags/v0.10.0\x00 \x002020-08-04T08:23:30-05:00"), []byte("f73ee8ed601efea74f3b734eeb073307e1615606\x00refs/tags/v0.5.1\x00 \x002020-04-16T16:06:21-04:00"), []byte("6057f7ed8d331c82030c713b650fc8fd2c0c2347\x00refs/tags/v0.5.2\x00 \x002020-04-16T16:20:26-04:00"), []byte("7886287b8758d1baf19cf7b8253856128369a2a7\x00refs/tags/v0.5.3\x00 \x002020-04-16T16:55:58-04:00"), []byte("b69f89473bbcc04dc52cafaf6baa504e34791f5a\x00refs/tags/v0.6.0\x00 \x002020-04-20T12:10:49-04:00"), []byte("172b7fcf8b8c49b37b231693433586c2bfd1619e\x00refs/tags/v0.7.0\x00 \x002020-04-20T12:37:36-04:00"), []byte("5bc35c78fb5fb388891ca944cd12d85fd6dede95\x00refs/tags/v0.8.0\x00 \x002020-05-05T12:53:18-05:00"), []byte("14faa49ef098df9488536ca3c9b26d79e6bec4d6\x00refs/tags/v0.9.0\x00 \x002020-07-14T14:26:40-05:00"), []byte("0a82af8b6914d8c81326eee5f3a7e1d1106547f1\x00refs/tags/v1.0.0\x00 \x002020-08-19T19:33:39-05:00"), []byte("262defb72b96261a7d56b000d438c5c7ec6d0f3e\x00refs/tags/v1.1.0\x00 \x002020-08-21T14:15:44-05:00"), []byte("806b96eb544e7e632a617c26402eccee6d67faed\x00refs/tags/v1.1.1\x00 \x002020-08-21T16:02:35-05:00"), []byte("5d8865d6feacb4fce3313cade2c61dc29c6271e6\x00refs/tags/v1.1.2\x00 \x002020-08-22T13:45:26-05:00"), []byte("8c45a5635cf0a4968cc8c9dac2d61c388b53251e\x00refs/tags/v1.1.3\x00 \x002020-08-25T10:10:46-05:00"), []byte("fc212da31ce157ef0795e934381509c5a50654f6\x00refs/tags/v1.1.4\x00 \x002020-08-26T14:02:47-05:00"), []byte("4fd8b2c3522df32ffc8be983d42c3a504cc75fbc\x00refs/tags/v1.2.0\x00 \x002020-09-07T09:52:43-05:00"), []byte("9741f54aa0f14be1103b00c89406393ea4d8a08a\x00refs/tags/v1.3.0\x00 \x002021-02-10T23:21:31+00:00"), []byte("b358977103d2d66e2a3fc5f8081075c2834c4936\x00refs/tags/v1.3.1\x00 \x002021-02-24T20:16:45+00:00"), []byte("2882ad236da4b649b4c1259d815bf1a378e3b92f\x00refs/tags/v1.4.0\x00 \x002021-05-13T10:41:02-05:00"), []byte("340b84452286c18000afad9b140a32212a82840a\x00refs/tags/v1.5.0\x00 \x002021-05-20T18:41:41-05:00"), }, []byte("\n"))) if err != nil { t.Fatalf("unexpected error parsing ref descriptions: %s", err) } makeBranch := func(name, createdDate string, isDefaultBranch bool) gitdomain.RefDescription { return gitdomain.RefDescription{Name: name, Type: gitdomain.RefTypeBranch, IsDefaultBranch: isDefaultBranch, CreatedDate: mustParseDate(createdDate, t)} } makeTag := func(name, createdDate string) gitdomain.RefDescription { return gitdomain.RefDescription{Name: name, Type: gitdomain.RefTypeTag, IsDefaultBranch: false, CreatedDate: mustParseDate(createdDate, t)} } expectedRefDescriptions := map[string][]gitdomain.RefDescription{ "66a7ac584740245fc523da443a3f540a52f8af72": {makeBranch("bl/symbols", "2021-01-18T16:46:51-08:00", false)}, 
"58537c06cf7ba8a562a3f5208fb7a8efbc971d0e": {makeBranch("bl/symbols-2", "2021-02-24T06:21:20-08:00", false)}, "a40716031ae97ee7c5cdf1dec913567a4a7c50c8": {makeBranch("ef/wtf", "2021-02-10T10:50:08-06:00", false)}, "e2e283fdaf6ea4a419cdbad142bbfd4b730080f8": {makeBranch("garo/go-and-typescript-lsif-indexing", "2020-04-29T16:45:46+00:00", false)}, "c485d92c3d2065041bf29b3fe0b55ffac7e66b2a": {makeBranch("garo/index-specific-files", "2021-03-01T13:09:42-08:00", false)}, "ce30aee6cc56f39d0ac6fee03c4c151c08a8cd2e": {makeBranch("master", "2021-06-16T11:51:09-07:00", true)}, "ec5cfc8ab33370c698273b1a097af73ea289c92b": {makeBranch("nsc/bump-go-version", "2021-03-12T22:33:17+00:00", false)}, "22b2c4f734f62060cae69da856fe3854defdcc87": {makeBranch("nsc/markupcontent", "2021-05-03T23:50:02+01:00", false)}, "9df3358a18792fa9dbd40d506f2e0ad23fc11ee8": {makeBranch("nsc/random", "2021-02-10T16:29:06+00:00", false)}, "a02b85b63345a1406d7a19727f7a5472c976e053": {makeBranch("sg/document-symbols", "2021-04-08T15:33:03-07:00", false)}, "234b0a484519129b251164ecb0674ec27d154d2f": {makeBranch("symbols", "2021-01-01T22:51:55-08:00", false)}, "6b5ae2e0ce568a7641174072271d109d7d0977c7": {gitdomain.RefDescription{Name: "v0.0.0", Type: gitdomain.RefTypeTag, IsDefaultBranch: false}}, "c165bfff52e9d4f87891bba497e3b70fea144d89": {makeTag("v0.10.0", "2020-08-04T08:23:30-05:00")}, "f73ee8ed601efea74f3b734eeb073307e1615606": {makeTag("v0.5.1", "2020-04-16T16:06:21-04:00")}, "6057f7ed8d331c82030c713b650fc8fd2c0c2347": {makeTag("v0.5.2", "2020-04-16T16:20:26-04:00")}, "7886287b8758d1baf19cf7b8253856128369a2a7": {makeTag("v0.5.3", "2020-04-16T16:55:58-04:00")}, "b69f89473bbcc04dc52cafaf6baa504e34791f5a": {makeTag("v0.6.0", "2020-04-20T12:10:49-04:00")}, "172b7fcf8b8c49b37b231693433586c2bfd1619e": {makeTag("v0.7.0", "2020-04-20T12:37:36-04:00")}, "5bc35c78fb5fb388891ca944cd12d85fd6dede95": {makeTag("v0.8.0", "2020-05-05T12:53:18-05:00")}, "14faa49ef098df9488536ca3c9b26d79e6bec4d6": {makeTag("v0.9.0", "2020-07-14T14:26:40-05:00")}, "0a82af8b6914d8c81326eee5f3a7e1d1106547f1": {makeTag("v1.0.0", "2020-08-19T19:33:39-05:00")}, "262defb72b96261a7d56b000d438c5c7ec6d0f3e": {makeTag("v1.1.0", "2020-08-21T14:15:44-05:00")}, "806b96eb544e7e632a617c26402eccee6d67faed": {makeTag("v1.1.1", "2020-08-21T16:02:35-05:00")}, "5d8865d6feacb4fce3313cade2c61dc29c6271e6": {makeTag("v1.1.2", "2020-08-22T13:45:26-05:00")}, "8c45a5635cf0a4968cc8c9dac2d61c388b53251e": {makeTag("v1.1.3", "2020-08-25T10:10:46-05:00")}, "fc212da31ce157ef0795e934381509c5a50654f6": {makeTag("v1.1.4", "2020-08-26T14:02:47-05:00")}, "4fd8b2c3522df32ffc8be983d42c3a504cc75fbc": {makeTag("v1.2.0", "2020-09-07T09:52:43-05:00")}, "9741f54aa0f14be1103b00c89406393ea4d8a08a": {makeTag("v1.3.0", "2021-02-10T23:21:31+00:00")}, "b358977103d2d66e2a3fc5f8081075c2834c4936": {makeTag("v1.3.1", "2021-02-24T20:16:45+00:00")}, "2882ad236da4b649b4c1259d815bf1a378e3b92f": {makeTag("v1.4.0", "2021-05-13T10:41:02-05:00")}, "340b84452286c18000afad9b140a32212a82840a": {makeTag("v1.5.0", "2021-05-20T18:41:41-05:00")}, } if diff := cmp.Diff(expectedRefDescriptions, refDescriptions); diff != "" { t.Errorf("unexpected ref descriptions (-want +got):\n%s", diff) } } func TestFilterRefDescriptions(t *testing.T) { ctx := actor.WithActor(context.Background(), &actor.Actor{ UID: 1, }) gitCommands := append(getGitCommandsWithFiles("file1", "file2"), getGitCommandsWithFiles("file3", "file4")...) repo := MakeGitRepository(t, gitCommands...) 
refDescriptions := map[string][]gitdomain.RefDescription{ "d38233a79e037d2ab8170b0d0bc0aa438473e6da": {}, "2775e60f523d3151a2a34ffdc659f500d0e73022": {}, "2ba4dd2b9a27ec125fea7d72e12b9824ead18631": {}, "9019942b8b92d5a70a7f546d97c451621c5059a6": {}, } checker := getTestSubRepoPermsChecker("file3") filtered := filterRefDescriptions(ctx, database.NewMockDB(), repo, refDescriptions, checker) expectedRefDescriptions := map[string][]gitdomain.RefDescription{ "d38233a79e037d2ab8170b0d0bc0aa438473e6da": {}, "2ba4dd2b9a27ec125fea7d72e12b9824ead18631": {}, "9019942b8b92d5a70a7f546d97c451621c5059a6": {}, } if diff := cmp.Diff(expectedRefDescriptions, filtered); diff != "" { t.Errorf("unexpected ref descriptions (-want +got):\n%s", diff) } } func TestRefDescriptions(t *testing.T) { t.Parallel() ctx := actor.WithActor(context.Background(), &actor.Actor{ UID: 1, }) db := database.NewMockDB() gitCommands := append(getGitCommandsWithFiles("file1", "file2"), "git checkout -b my-other-branch") gitCommands = append(gitCommands, getGitCommandsWithFiles("file1-b2", "file2-b2")...) gitCommands = append(gitCommands, "git checkout -b my-branch-no-access") gitCommands = append(gitCommands, getGitCommandsWithFiles("file", "file-with-no-access")...) repo := MakeGitRepository(t, gitCommands...) makeBranch := func(name, createdDate string, isDefaultBranch bool) gitdomain.RefDescription { return gitdomain.RefDescription{Name: name, Type: gitdomain.RefTypeBranch, IsDefaultBranch: isDefaultBranch, CreatedDate: mustParseDate(createdDate, t)} } t.Run("basic", func(t *testing.T) { refDescriptions, err := RefDescriptions(ctx, db, repo, nil) if err != nil { t.Errorf("err calling RefDescriptions: %s", err) } expectedRefDescriptions := map[string][]gitdomain.RefDescription{ "2ba4dd2b9a27ec125fea7d72e12b9824ead18631": {makeBranch("master", "2006-01-02T15:04:05Z", false)}, "9d7a382983098eed6cf911bd933dfacb13116e42": {makeBranch("my-other-branch", "2006-01-02T15:04:05Z", false)}, "7cf006d0599531db799c08d3b00d7fd06da33015": {makeBranch("my-branch-no-access", "2006-01-02T15:04:05Z", true)}, } if diff := cmp.Diff(expectedRefDescriptions, refDescriptions); diff != "" { t.Errorf("unexpected ref descriptions (-want +got):\n%s", diff) } }) t.Run("with sub-repo enabled", func(t *testing.T) { checker := getTestSubRepoPermsChecker("file-with-no-access") refDescriptions, err := RefDescriptions(ctx, db, repo, checker) if err != nil { t.Errorf("err calling RefDescriptions: %s", err) } expectedRefDescriptions := map[string][]gitdomain.RefDescription{ "2ba4dd2b9a27ec125fea7d72e12b9824ead18631": {makeBranch("master", "2006-01-02T15:04:05Z", false)}, "9d7a382983098eed6cf911bd933dfacb13116e42": {makeBranch("my-other-branch", "2006-01-02T15:04:05Z", false)}, } if diff := cmp.Diff(expectedRefDescriptions, refDescriptions); diff != "" { t.Errorf("unexpected ref descriptions (-want +got):\n%s", diff) } }) } func TestCommitsUniqueToBranch(t *testing.T) { t.Parallel() ctx := actor.WithActor(context.Background(), &actor.Actor{ UID: 1, }) db := database.NewMockDB() gitCommands := append([]string{"git checkout -b my-branch"}, getGitCommandsWithFiles("file1", "file2")...) gitCommands = append(gitCommands, getGitCommandsWithFiles("file3", "file-with-no-access")...) repo := MakeGitRepository(t, gitCommands...) 
t.Run("basic", func(t *testing.T) { commits, err := CommitsUniqueToBranch(ctx, db, repo, "my-branch", true, &time.Time{}, nil) if err != nil { t.Errorf("err calling RefDescriptions: %s", err) } expectedCommits := map[string]time.Time{ "2775e60f523d3151a2a34ffdc659f500d0e73022": *mustParseDate("2006-01-02T15:04:05-00:00", t), "2ba4dd2b9a27ec125fea7d72e12b9824ead18631": *mustParseDate("2006-01-02T15:04:05-00:00", t), "791ce7cd8ca2d855e12f47f8692a62bc42477edc": *mustParseDate("2006-01-02T15:04:05-00:00", t), "d38233a79e037d2ab8170b0d0bc0aa438473e6da": *mustParseDate("2006-01-02T15:04:05-00:00", t), } if diff := cmp.Diff(expectedCommits, commits); diff != "" { t.Errorf("unexpected ref descriptions (-want +got):\n%s", diff) } }) t.Run("with sub-repo enabled", func(t *testing.T) { checker := getTestSubRepoPermsChecker("file-with-no-access") commits, err := CommitsUniqueToBranch(ctx, db, repo, "my-branch", true, &time.Time{}, checker) if err != nil { t.Errorf("err calling RefDescriptions: %s", err) } expectedCommits := map[string]time.Time{ "2775e60f523d3151a2a34ffdc659f500d0e73022": *mustParseDate("2006-01-02T15:04:05-00:00", t), "2ba4dd2b9a27ec125fea7d72e12b9824ead18631": *mustParseDate("2006-01-02T15:04:05-00:00", t), "d38233a79e037d2ab8170b0d0bc0aa438473e6da": *mustParseDate("2006-01-02T15:04:05-00:00", t), } if diff := cmp.Diff(expectedCommits, commits); diff != "" { t.Errorf("unexpected ref descriptions (-want +got):\n%s", diff) } }) } func TestCommitDate(t *testing.T) { t.Parallel() ctx := actor.WithActor(context.Background(), &actor.Actor{ UID: 1, }) db := database.NewMockDB() gitCommands := getGitCommandsWithFiles("file1", "file2") repo := MakeGitRepository(t, gitCommands...) t.Run("basic", func(t *testing.T) { _, date, commitExists, err := CommitDate(ctx, db, repo, "d38233a79e037d2ab8170b0d0bc0aa438473e6da", nil) if err != nil { t.Errorf("error fetching CommitDate: %s", err) } if !commitExists { t.Errorf("commit should exist") } if !date.Equal(time.Date(2006, 1, 2, 15, 4, 5, 0, time.UTC)) { t.Errorf("unexpected date: %s", date) } }) t.Run("with sub-repo permissions enabled", func(t *testing.T) { checker := getTestSubRepoPermsChecker("file1") _, date, commitExists, err := CommitDate(ctx, db, repo, "d38233a79e037d2ab8170b0d0bc0aa438473e6da", checker) if err != nil { t.Errorf("error fetching CommitDate: %s", err) } if commitExists { t.Errorf("expect commit to not exist since the user doesn't have access") } if !date.IsZero() { t.Errorf("expected date to be empty, got: %s", date) } }) } func TestGetCommits(t *testing.T) { t.Parallel() ctx := actor.WithActor(context.Background(), &actor.Actor{ UID: 1, }) db := database.NewMockDB() repo1 := MakeGitRepository(t, getGitCommandsWithFiles("file1", "file2")...) repo2 := MakeGitRepository(t, getGitCommandsWithFiles("file3", "file4")...) repo3 := MakeGitRepository(t, getGitCommandsWithFiles("file5", "file6")...) 
repoCommits := []api.RepoCommit{ {Repo: repo1, CommitID: api.CommitID("HEAD")}, // HEAD (file2) {Repo: repo1, CommitID: api.CommitID("HEAD~1")}, // HEAD~1 (file1) {Repo: repo2, CommitID: api.CommitID("67762ad757dd26cac4145f2b744fd93ad10a48e0")}, // HEAD (file4) {Repo: repo2, CommitID: api.CommitID("2b988222e844b570959a493f5b07ec020b89e122")}, // HEAD~1 (file3) {Repo: repo3, CommitID: api.CommitID("01bed0a")}, // abbrev HEAD (file6) {Repo: repo3, CommitID: api.CommitID("unresolvable")}, // unresolvable {Repo: api.RepoName("unresolvable"), CommitID: api.CommitID("deadbeef")}, // unresolvable } t.Run("basic", func(t *testing.T) { expectedCommits := []*gitdomain.Commit{ { ID: "2ba4dd2b9a27ec125fea7d72e12b9824ead18631", Author: gitdomain.Signature{Name: "a", Email: "[email protected]", Date: *mustParseDate("2006-01-02T15:04:05Z", t)}, Committer: &gitdomain.Signature{Name: "a", Email: "[email protected]", Date: *mustParseDate("2006-01-02T15:04:05Z", t)}, Message: "commit2", Parents: []api.CommitID{"d38233a79e037d2ab8170b0d0bc0aa438473e6da"}, }, { ID: "d38233a79e037d2ab8170b0d0bc0aa438473e6da", Author: gitdomain.Signature{Name: "a", Email: "[email protected]", Date: *mustParseDate("2006-01-02T15:04:05Z", t)}, Committer: &gitdomain.Signature{Name: "a", Email: "[email protected]", Date: *mustParseDate("2006-01-02T15:04:05Z", t)}, Message: "commit1", }, { ID: "67762ad757dd26cac4145f2b744fd93ad10a48e0", Author: gitdomain.Signature{Name: "a", Email: "[email protected]", Date: *mustParseDate("2006-01-02T15:04:05Z", t)}, Committer: &gitdomain.Signature{Name: "a", Email: "[email protected]", Date: *mustParseDate("2006-01-02T15:04:05Z", t)}, Message: "commit2", Parents: []api.CommitID{"2b988222e844b570959a493f5b07ec020b89e122"}, }, { ID: "2b988222e844b570959a493f5b07ec020b89e122", Author: gitdomain.Signature{Name: "a", Email: "[email protected]", Date: *mustParseDate("2006-01-02T15:04:05Z", t)}, Committer: &gitdomain.Signature{Name: "a", Email: "[email protected]", Date: *mustParseDate("2006-01-02T15:04:05Z", t)}, Message: "commit1", }, { ID: "01bed0ae660668c57539cecaacb4c33d77609f43", Author: gitdomain.Signature{Name: "a", Email: "[email protected]", Date: *mustParseDate("2006-01-02T15:04:05Z", t)}, Committer: &gitdomain.Signature{Name: "a", Email: "[email protected]", Date: *mustParseDate("2006-01-02T15:04:05Z", t)}, Message: "commit2", Parents: []api.CommitID{"d6ce2e76d171569d81c0afdc4573f461cec17d45"}, }, nil, nil, } commits, err := getCommits(ctx, db, repoCommits, true, nil) if err != nil { t.Fatalf("unexpected error calling getCommits: %s", err) } if diff := cmp.Diff(expectedCommits, commits); diff != "" { t.Errorf("unexpected commits (-want +got):\n%s", diff) } }) t.Run("with sub-repo permissions", func(t *testing.T) { expectedCommits := []*gitdomain.Commit{ { ID: "2ba4dd2b9a27ec125fea7d72e12b9824ead18631", Author: gitdomain.Signature{Name: "a", Email: "[email protected]", Date: *mustParseDate("2006-01-02T15:04:05Z", t)}, Committer: &gitdomain.Signature{Name: "a", Email: "[email protected]", Date: *mustParseDate("2006-01-02T15:04:05Z", t)}, Message: "commit2", Parents: []api.CommitID{"d38233a79e037d2ab8170b0d0bc0aa438473e6da"}, }, nil, // file 1 { ID: "67762ad757dd26cac4145f2b744fd93ad10a48e0", Author: gitdomain.Signature{Name: "a", Email: "[email protected]", Date: *mustParseDate("2006-01-02T15:04:05Z", t)}, Committer: &gitdomain.Signature{Name: "a", Email: "[email protected]", Date: *mustParseDate("2006-01-02T15:04:05Z", t)}, Message: "commit2", Parents: 
[]api.CommitID{"2b988222e844b570959a493f5b07ec020b89e122"}, }, nil, // file 3 { ID: "01bed0ae660668c57539cecaacb4c33d77609f43", Author: gitdomain.Signature{Name: "a", Email: "[email protected]", Date: *mustParseDate("2006-01-02T15:04:05Z", t)}, Committer: &gitdomain.Signature{Name: "a", Email: "[email protected]", Date: *mustParseDate("2006-01-02T15:04:05Z", t)}, Message: "commit2", Parents: []api.CommitID{"d6ce2e76d171569d81c0afdc4573f461cec17d45"}, }, nil, nil, } commits, err := getCommits(ctx, db, repoCommits, true, getTestSubRepoPermsChecker("file1", "file3")) if err != nil { t.Fatalf("unexpected error calling getCommits: %s", err) } if diff := cmp.Diff(expectedCommits, commits); diff != "" { t.Errorf("unexpected commits (-want +got):\n%s", diff) } }) } func testCommits(ctx context.Context, label string, repo api.RepoName, opt CommitsOptions, checker authz.SubRepoPermissionChecker, wantTotal uint, wantCommits []*gitdomain.Commit, t *testing.T) { t.Helper() db := database.NewMockDB() commits, err := Commits(ctx, db, repo, opt, checker) if err != nil { t.Errorf("%s: Commits(): %s", label, err) return } total, err := commitCount(ctx, db, repo, opt) if err != nil { t.Errorf("%s: commitCount(): %s", label, err) return } if total != wantTotal { t.Errorf("%s: got %d total commits, want %d", label, total, wantTotal) } if len(commits) != len(wantCommits) { t.Errorf("%s: got %d commits, want %d", label, len(commits), len(wantCommits)) } checkCommits(t, label, commits, wantCommits) } func checkCommits(t *testing.T, label string, commits, wantCommits []*gitdomain.Commit) { t.Helper() for i := 0; i < len(commits) || i < len(wantCommits); i++ { var gotC, wantC *gitdomain.Commit if i < len(commits) { gotC = commits[i] } if i < len(wantCommits) { wantC = wantCommits[i] } if !CommitsEqual(gotC, wantC) { t.Errorf("%s: got commit %d == %+v, want %+v", label, i, gotC, wantC) } } } // get a test sub-repo permissions checker which allows access to all files (so should be a no-op) func getTestSubRepoPermsChecker(noAccessPaths ...string) authz.SubRepoPermissionChecker { checker := authz.NewMockSubRepoPermissionChecker() checker.EnabledFunc.SetDefaultHook(func() bool { return true }) checker.PermissionsFunc.SetDefaultHook(func(ctx context.Context, i int32, content authz.RepoContent) (authz.Perms, error) { for _, noAccessPath := range noAccessPaths { if content.Path == noAccessPath { return authz.None, nil } } return authz.Read, nil }) return checker } func getGitCommandsWithFiles(fileName1, fileName2 string) []string { return []string{ fmt.Sprintf("touch %s", fileName1), fmt.Sprintf("git add %s", fileName1), "GIT_COMMITTER_NAME=a [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:05Z git commit -m commit1 --author='a <[email protected]>' --date 2006-01-02T15:04:05Z", fmt.Sprintf("touch %s", fileName2), fmt.Sprintf("git add %s", fileName2), "GIT_COMMITTER_NAME=a [email protected] GIT_COMMITTER_DATE=2006-01-02T15:04:05Z git commit -m commit2 --author='a <[email protected]>' --date 2006-01-02T15:04:05Z", } } func mustParseDate(s string, t *testing.T) *time.Time { t.Helper() date, err := time.Parse(time.RFC3339, s) if err != nil { t.Fatalf("unexpected error parsing date string: %s", err) } return &date }
{ t.Run("Body", func(t *testing.T) { tests := map[gitdomain.Message]string{ "hello": "", "hello\n": "", "hello\n\n": "", "hello\nworld": "world", "hello\n\nworld": "world", "hello\n\nworld\nfoo": "world\nfoo", "hello\n\nworld\nfoo\n": "world\nfoo", } for input, want := range tests { got := input.Body() if got != want { t.Errorf("got %q, want %q", got, want) } } }) }
fault-comment-card.ts
import { Component, Input, Output, EventEmitter, } from '@angular/core'; import { FormGroup, FormControl } from '@angular/forms'; import { FaultSummary } from '@shared/models/fault-marking.model'; import { TestCategory } from '@dvsa/mes-test-schema/category-definitions/common/test-category'; @Component({ selector: 'fault-comment-card', templateUrl: 'fault-comment-card.html', styleUrls: ['fault-comment-card.scss'], }) export class
{ @Input() outcome: string; @Input() formGroup: FormGroup; @Input() faultComments: FaultSummary[]; @Input() header: string; @Input() faultType: string; @Input() shouldRender: boolean; @Input() faultCount: number; @Input() maxFaultCount: number; @Input() isDelegatedTest?: boolean = false; @Input() testCategory?: TestCategory; @Output() faultCommentsChange = new EventEmitter<FaultSummary>(); ngOnChanges() { this.faultComments.forEach((value) => { const control = new FormControl(null); this.formGroup.addControl( `faultComment-${value.source}-${this.faultType}-${value.competencyIdentifier}`, control, ); }); } faultCommentChanged(faultComment: FaultSummary): void { this.faultCommentsChange.emit(faultComment); } }
FaultCommentCardComponent
classes.js
class Person { constructor(first_name, last_name) { this.first_name = first_name; this.last_name = last_name; } full_name() { return this.first_name + " " + this.last_name; } } class
extends Person { constructor(first_name, last_name, persno) { super(first_name, last_name); this.persno = persno; } info() { return this.persno + " " + this.full_name(); } } const staff = new Staff('Jane', 'Fuller', '0643256'); const message = (person) => `For more information, please contact Ms. ${person.last_name.toUpperCase()}, ${person.first_name}.` console.log(staff.info()); console.log(message(staff));
Staff
tflite_conversion.py
# coding=utf-8 # Copyright 2021 The Google Research Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # pylint:disable=line-too-long r"""Converts distilled models to TFLite by iterating over experiment folders. The aim of this file is: 1. To get TFLite models corresponding to the trained models, but only returning the embedding (and not the target output used during training). """ # pylint:enable=line-too-long import os from absl import app from absl import flags from absl import logging import numpy as np import tensorflow as tf from non_semantic_speech_benchmark.data_prep import audio_to_embeddings_beam_utils from non_semantic_speech_benchmark.distillation import models from non_semantic_speech_benchmark.distillation.compression_lib import compression_op as compression from non_semantic_speech_benchmark.distillation.compression_lib import compression_wrapper flags.DEFINE_string( 'experiment_dir', None, '(CNS) Directory containing directories with parametrized names like ' '"1-al=1.0,ap=False,bd=2048,cop=False,lr=0.0001,ms=small,qat=False,tbs=512". ' 'Note that only the mentioned hyper-params are supported right now.') flags.DEFINE_string('output_dir', None, 'Place to write models to.') flags.DEFINE_string('checkpoint_number', None, 'Optional checkpoint number to ' 'use, instead of most recent.') flags.DEFINE_boolean('quantize', False, 'Whether to quantize converted models if possible.') flags.DEFINE_boolean('include_frontend', False, 'Whether to include frontend.') FLAGS = flags.FLAGS def get_params(experiment_dir_str): """Extracts hyperparams from experiment directory string. Args: experiment_dir_str: The folder-name for the set of hyperparams. Eg: '1-al=1.0,ap=False,bd=2048,cop=False,lr=0.0001,ms=small,qat=False,tbs=512' Returns: A dict mapping param key (str) to eval'ed value (float/eval/string). """ parsed_params = {} start_idx = experiment_dir_str.find('-') + 1 for kv in experiment_dir_str[start_idx:].split(','): key, value = kv.split('=') try: value = eval(value) # pylint: disable=eval-used except: # pylint: disable=bare-except pass parsed_params[key] = value return parsed_params def get_default_compressor(): compression_params = compression.CompressionOp.get_default_hparams().parse('') compressor = compression_wrapper.get_apply_compression( compression_params, global_step=0) return compressor def get_tflite_friendly_model(checkpoint_folder_path, params, checkpoint_number=None, include_frontend=False):
def convert_tflite_model(model, quantize, model_path):
  """Uses TFLiteConverter to convert a Keras Model.

  Args:
    model: Keras model obtained from get_tflite_friendly_model.
    quantize: Whether to quantize TFLite model using dynamic quantization. See:
      https://www.tensorflow.org/lite/performance/post_training_quant
    model_path: Path for TFLite file.
  """
  converter = tf.lite.TFLiteConverter.from_keras_model(model)
  converter.target_spec.supported_ops = [
      tf.lite.OpsSet.TFLITE_BUILTINS,  # enable TensorFlow Lite ops.
      # There is a GatherV2 op in the frontend that isn't supported by TFLite
      # as a builtin op. (It works as a TFLite builtin only if the sample size
      # to the frontend is a constant.)
      # However, TFLite supports importing some relevant operators from TF,
      # at the cost of binary size (~ a few MB).
      # See: https://www.tensorflow.org/lite/guide/ops_select
      # NOTE: This has no effect on the model/binary size if the graph does not
      # require the extra TF ops (for example, for the no-frontend version).
      tf.lite.OpsSet.SELECT_TF_OPS  # enable TensorFlow ops.
  ]
  if quantize:
    converter.optimizations = [tf.lite.Optimize.DEFAULT]
  tflite_buffer = converter.convert()

  with tf.io.gfile.GFile(model_path, 'wb') as f:
    f.write(tflite_buffer)


def main(_):
  tf.compat.v2.enable_v2_behavior()
  if not tf.io.gfile.exists(FLAGS.output_dir):
    tf.io.gfile.makedirs(FLAGS.output_dir)

  # Get experiment dir names.
  # NOTE: This assumes that only folders with hyperparams in their name occur
  # in the working dir.
  if not tf.io.gfile.exists(FLAGS.experiment_dir):
    raise ValueError(f'Experiment dir doesn\'t exist: {FLAGS.experiment_dir}')
  subdirs = tf.io.gfile.walk(FLAGS.experiment_dir)
  for subdir in subdirs:
    if subdir[0] == FLAGS.experiment_dir:
      experiment_dirs = subdir[1]
      break

  # Generate params & TFLite experiment dir names.
  experiment_dir_to_params = {}
  # Maps experiment dir name to [float model, quantized model] paths.
  experiment_dir_to_model = {}
  i = 0
  for experiment_dir in experiment_dirs:
    logging.info('Working on hyperparams: %s', experiment_dir)
    i += 1
    params = get_params(experiment_dir)
    experiment_dir_to_params[experiment_dir] = params
    folder_path = os.path.join(FLAGS.experiment_dir, experiment_dir)

    # Export SavedModel & convert to TFLite.
    # Note that we keep over-writing the SavedModel while converting
    # experiments to TFLite, since we only care about the final flatbuffer
    # models.
static_model = get_tflite_friendly_model( checkpoint_folder_path=folder_path, params=params, checkpoint_number=FLAGS.checkpoint_number, include_frontend=FLAGS.include_frontend) quantize = params['qat'] model_path = os.path.join(FLAGS.output_dir, 'model_{}.tflite'.format(i)) convert_tflite_model( static_model, quantize=quantize, model_path=model_path) experiment_dir_to_model[experiment_dir] = model_path if quantize: logging.info('Exported INT8 TFLite model') else: logging.info('Exported FP32 TFLite model') logging.info('Sanity checking...') interpreter = audio_to_embeddings_beam_utils.build_tflite_interpreter( model_path) if FLAGS.include_frontend: model_input = np.zeros([1, 32000], dtype=np.float32) expected_output_shape = (7, params['bd']) else: model_input = np.zeros([1, 96, 64, 1], dtype=np.float32) expected_output_shape = (1, params['bd']) output = audio_to_embeddings_beam_utils.samples_to_embedding_tflite( model_input, sample_rate=16000, interpreter=interpreter, output_key='0') np.testing.assert_array_equal(output.shape, expected_output_shape) logging.info('Model "%s" worked.', model_path) logging.info('Total TFLite models generated: %i', i) if __name__ == '__main__': flags.mark_flags_as_required(['experiment_dir', 'output_dir']) app.run(main)
"""Given folder & training params, exports SavedModel without frontend.""" compressor = None if params['cop']: compressor = get_default_compressor() static_model = models.get_keras_model( bottleneck_dimension=params['bd'], output_dimension=0, # Don't include the unnecessary final layer. alpha=params['al'], mobilenet_size=params['ms'], frontend=include_frontend, avg_pool=params['ap'], compressor=compressor, quantize_aware_training=params['qat'], tflite=True) checkpoint = tf.train.Checkpoint(model=static_model) if checkpoint_number: checkpoint_to_load = os.path.join( checkpoint_folder_path, f'ckpt-{checkpoint_number}') assert tf.train.load_checkpoint(checkpoint_to_load) else: checkpoint_to_load = tf.train.latest_checkpoint(checkpoint_folder_path) checkpoint.restore(checkpoint_to_load).expect_partial() return static_model
test_calculate_accuracies.py
import unittest from pytorch_metric_learning.utils import accuracy_calculator import numpy as np class TestCalculateAccuracies(unittest.TestCase): def test_accuracy_calculator(self): query_labels = np.array([1, 1, 2, 3, 4]) knn_labels1 = np.array( [ [0, 1, 1, 2, 2], [1, 0, 1, 1, 3], [4, 4, 4, 4, 2], [3, 1, 3, 1, 3], [0, 0, 4, 2, 2], ] ) label_counts1 = {1: 3, 2: 5, 3: 4, 4: 5} knn_labels2 = knn_labels1 + 5 label_counts2 = {k + 5: v for k, v in label_counts1.items()} for avg_of_avgs in [False, True]: for i, (knn_labels, label_counts) in enumerate( [(knn_labels1, label_counts1), (knn_labels2, label_counts2)] ): AC = accuracy_calculator.AccuracyCalculator( exclude=("NMI", "AMI"), avg_of_avgs=avg_of_avgs ) kwargs = { "query_labels": query_labels, "label_counts": label_counts, "knn_labels": knn_labels, "not_lone_query_mask": np.ones(5).astype(np.bool) if i == 0 else np.zeros(5).astype(np.bool), } function_dict = AC.get_function_dict() for ecfss in [False, True]: if ecfss: kwargs["knn_labels"] = kwargs["knn_labels"][:, 1:] kwargs["embeddings_come_from_same_source"] = ecfss acc = AC._get_accuracy(function_dict, **kwargs) if i == 1: self.assertTrue(acc["precision_at_1"] == 0) self.assertTrue(acc["r_precision"] == 0) self.assertTrue(acc["mean_average_precision_at_r"] == 0) self.assertTrue(acc["mean_average_precision"] == 0) else: self.assertTrue( acc["precision_at_1"] == self.correct_precision_at_1(ecfss, avg_of_avgs) ) self.assertTrue( acc["r_precision"] == self.correct_r_precision(ecfss, avg_of_avgs) ) self.assertTrue( acc["mean_average_precision_at_r"] == self.correct_mean_average_precision_at_r( ecfss, avg_of_avgs ) ) self.assertTrue( acc["mean_average_precision"] == self.correct_mean_average_precision(ecfss, avg_of_avgs) ) def correct_precision_at_1(self, embeddings_come_from_same_source, avg_of_avgs): if not embeddings_come_from_same_source: if not avg_of_avgs: return 0.4 else: return (0.5 + 0 + 1 + 0) / 4 else: if not avg_of_avgs: return 1.0 / 5 else: return (0.5 + 0 + 0 + 0) / 4 def correct_r_precision(self, embeddings_come_from_same_source, avg_of_avgs): if not embeddings_come_from_same_source: acc0 = 2.0 / 3 acc1 = 2.0 / 3 acc2 = 1.0 / 5 acc3 = 2.0 / 4 acc4 = 1.0 / 5 else: acc0 = 1.0 / 1 acc1 = 1.0 / 2 acc2 = 1.0 / 4 acc3 = 1.0 / 3 acc4 = 1.0 / 4 if not avg_of_avgs: return np.mean([acc0, acc1, acc2, acc3, acc4]) else: return np.mean([(acc0 + acc1) / 2, acc2, acc3, acc4]) def correct_mean_average_precision_at_r( self, embeddings_come_from_same_source, avg_of_avgs ): if not embeddings_come_from_same_source: acc0 = (1.0 / 2 + 2.0 / 3) / 3 acc1 = (1 + 2.0 / 3) / 3 acc2 = (1.0 / 5) / 5 acc3 = (1 + 2.0 / 3) / 4 acc4 = (1.0 / 3) / 5 else: acc0 = 1 acc1 = (1.0 / 2) / 2 acc2 = (1.0 / 4) / 4 acc3 = (1.0 / 2) / 3 acc4 = (1.0 / 2) / 4 if not avg_of_avgs: return np.mean([acc0, acc1, acc2, acc3, acc4]) else: return np.mean([(acc0 + acc1) / 2, acc2, acc3, acc4]) def correct_mean_average_precision( self, embeddings_come_from_same_source, avg_of_avgs ): if not embeddings_come_from_same_source: acc0 = (1.0 / 2 + 2.0 / 3) / 2
else: acc0 = 1 acc1 = (1.0 / 2 + 2.0 / 3) / 2 acc2 = 1.0 / 4 acc3 = (1.0 / 2 + 2.0 / 4) / 2 acc4 = 1.0 / 2 if not avg_of_avgs: return np.mean([acc0, acc1, acc2, acc3, acc4]) else: return np.mean([(acc0 + acc1) / 2, acc2, acc3, acc4]) def test_get_label_counts(self): label_counts, num_k = accuracy_calculator.get_label_counts( [0, 1, 3, 2, 3, 1, 3, 3, 4, 6, 5, 10, 4, 4, 4, 4, 6, 6, 5] ) self.assertTrue( label_counts == {0: 1, 1: 2, 2: 1, 3: 4, 4: 5, 5: 2, 6: 3, 10: 1} ) self.assertTrue(num_k == 5) def test_get_lone_query_labels(self): query_labels = np.array([0, 1, 2, 3, 4, 5, 6]) reference_labels = np.array([0, 0, 0, 1, 2, 2, 3, 4, 5, 6]) reference_label_counts, _ = accuracy_calculator.get_label_counts( reference_labels ) lone_query_labels = accuracy_calculator.get_lone_query_labels( query_labels, reference_labels, reference_label_counts, True ) self.assertTrue( np.all(np.unique(lone_query_labels) == np.array([1, 3, 4, 5, 6])) ) query_labels = np.array([0, 1, 2, 3, 4]) reference_labels = np.array([0, 0, 0, 1, 2, 2, 4, 5, 6]) lone_query_labels = accuracy_calculator.get_lone_query_labels( query_labels, reference_labels, reference_label_counts, False ) self.assertTrue(np.all(np.unique(lone_query_labels) == np.array([3]))) class TestCalculateAccuraciesAndFaiss(unittest.TestCase): def test_accuracy_calculator_and_faiss(self): AC = accuracy_calculator.AccuracyCalculator(exclude=("NMI", "AMI")) query = np.arange(10)[:, None].astype(np.float32) reference = np.arange(10)[:, None].astype(np.float32) query_labels = np.arange(10).astype(np.int) reference_labels = np.arange(10).astype(np.int) acc = AC.get_accuracy(query, reference, query_labels, reference_labels, False) self.assertTrue(acc["precision_at_1"] == 1) self.assertTrue(acc["r_precision"] == 1) self.assertTrue(acc["mean_average_precision_at_r"] == 1) reference = (np.arange(20) / 2.0)[:, None].astype(np.float32) reference_labels = np.zeros(20).astype(np.int) reference_labels[::2] = query_labels reference_labels[1::2] = np.ones(10).astype(np.int) acc = AC.get_accuracy(query, reference, query_labels, reference_labels, True) self.assertTrue(acc["precision_at_1"] == 1) self.assertTrue(acc["r_precision"] == 0.5) self.assertTrue( acc["mean_average_precision_at_r"] == (1 + 2.0 / 2 + 3.0 / 5 + 4.0 / 7 + 5.0 / 9) / 10 ) def test_accuracy_calculator_and_faiss_avg_of_avgs(self): AC_global_average = accuracy_calculator.AccuracyCalculator( exclude=("NMI", "AMI"), avg_of_avgs=False ) AC_per_class_average = accuracy_calculator.AccuracyCalculator( exclude=("NMI", "AMI"), avg_of_avgs=True ) query = np.arange(10)[:, None].astype(np.float32) reference = np.arange(10)[:, None].astype(np.float32) query[-1] = 100 reference[0] = -100 query_labels = np.array([0, 0, 0, 0, 0, 0, 0, 0, 0, 1]) reference_labels = np.array([1, 0, 0, 0, 0, 0, 0, 0, 0, 0]) acc = AC_global_average.get_accuracy( query, reference, query_labels, reference_labels, False ) self.assertTrue(acc["precision_at_1"] == 0.9) self.assertTrue(acc["r_precision"] == 0.9) self.assertTrue(acc["mean_average_precision_at_r"] == 0.9) acc = AC_per_class_average.get_accuracy( query, reference, query_labels, reference_labels, False ) self.assertTrue(acc["precision_at_1"] == 0.5) self.assertTrue(acc["r_precision"] == 0.5) self.assertTrue(acc["mean_average_precision_at_r"] == 0.5)
acc1 = (1 + 2.0 / 3 + 3.0 / 4) / 3 acc2 = (1.0 / 5) / 1 acc3 = (1 + 2.0 / 3 + 3.0 / 5) / 3 acc4 = (1.0 / 3) / 1
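A quick hand-check of where one of the expected values above comes from. For query 0 of knn_labels1 (label 1, which occurs 3 times per label_counts1), r-precision inspects the 3 nearest labels of [0, 1, 1, 2, 2], of which 2 match, giving the 2/3 used in correct_r_precision:

import numpy as np

# Hand computation of r-precision for query 0 of knn_labels1.
knn_row = np.array([0, 1, 1, 2, 2])
query_label, r = 1, 3  # label 1 occurs 3 times in the reference set
r_precision = np.mean(knn_row[:r] == query_label)
assert abs(r_precision - 2.0 / 3) < 1e-9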
error.rs
use std::io; pub type Result<T> = std::result::Result<T, Error>; #[derive(thiserror::Error, Debug)] pub enum Error { #[error("config error: {0}")] Config(#[from] config::ConfigError), #[error("recipe list is corrupt")] CorruptedRecipeList, #[error("editor command '{0}' not found")] EditorCommandNotFound(String), #[error("io error: {0}")] Io(#[from] io::Error), #[error("output directory '{0}' already exists")] OutputDirectoryAlreadyExists(String), #[error("recipe directory '{0}' not found")] RecipeDirNotFound(String), #[error("recipe file '{0}' not found")] RecipeFileNotFound(String), #[error(transparent)] SweetPotator(sweet_potator::error::Error), #[error("template name '{0}' not configured")] TemplateNameNotConfigured(String), #[error("template name '{0}' not found")] TemplateNameNotFound(String), #[error("template engine error: {0}")] Tera(#[from] tera::Error), } impl From<sweet_potator::error::Error> for Error { fn from(error: sweet_potator::error::Error) -> Self
}
{ if let sweet_potator::error::Error::Io(error) = error { error.into() } else { Self::SweetPotator(error) } }
not_implemented.py
import logging from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################

log = logging.getLogger(__name__)

def NotImplemented(vim, *args, **kwargs):
    '''NotImplemented exception is thrown if the method is not yet implemented.'''

    obj = vim.client.factory.create('{urn:vim25}NotImplemented')

    # do some validation checking...
    if (len(args) + len(kwargs)) < 4:
        raise IndexError('Expected at least 4 arguments got: %d' % len(args))

    required = [ 'dynamicProperty', 'dynamicType', 'faultCause', 'faultMessage' ]
    optional = [ ]

    for name, arg in zip(required+optional, args):
        setattr(obj, name, arg)

    for name, value in kwargs.items():
        if name in required + optional:
            setattr(obj, name, value)
        else:
            raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))

    return obj
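The generated factory above follows a fixed required/optional argument pattern. A self-contained sketch of just that validation logic, with a hypothetical _Obj stand-in so no vSphere connection or SOAP factory is needed:

# Standalone sketch of the required/optional handling used by the factory.
class _Obj:  # hypothetical stand-in for the SOAP factory object
    pass

def build(*args, **kwargs):
    obj = _Obj()
    required = ['dynamicProperty', 'dynamicType', 'faultCause', 'faultMessage']
    optional = []
    if (len(args) + len(kwargs)) < len(required):
        raise IndexError('Expected at least %d arguments, got: %d'
                         % (len(required), len(args) + len(kwargs)))
    for name, arg in zip(required + optional, args):
        setattr(obj, name, arg)
    for name, value in kwargs.items():
        if name not in required + optional:
            raise ValueError('Invalid argument: %s' % name)
        setattr(obj, name, value)
    return obj

fault = build([], 'NotImplemented', None, [])
print(fault.dynamicType)  # NotImplemented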
_dataset.py
# -*- coding: utf-8 -*- from __future__ import print_function import hashlib import os import time import warnings import requests from ._protos.public.common import CommonService_pb2 as _CommonCommonService from ._protos.public.modeldb import DatasetService_pb2 as _DatasetService from ._protos.public.modeldb import DatasetVersionService_pb2 as _DatasetVersionService from .external import six from ._internal_utils import ( _utils, importer, ) from ._dataset_versioning.dataset_versions import DatasetVersions class Dataset(object): # TODO: delete is not supported on the API yet def __init__(self, conn, conf, name=None, dataset_type=None, desc=None, tags=None, attrs=None, workspace=None, public_within_org=None, _dataset_id=None): if name is not None and _dataset_id is not None: raise ValueError("cannot specify both `name` and `_dataset_id`") if workspace is not None: WORKSPACE_PRINT_MSG = "workspace: {}".format(workspace) else: WORKSPACE_PRINT_MSG = "personal workspace" if _dataset_id is not None: dataset = Dataset._get(conn, _dataset_id=_dataset_id) if dataset is not None: print("set existing Dataset: {}".format(dataset.name)) else: raise ValueError("Dataset with ID {} not found".format(_dataset_id)) else: if name is None: name = Dataset._generate_default_name() try: dataset = Dataset._create(conn, name, dataset_type, desc, tags, attrs, workspace, public_within_org) except requests.HTTPError as e: if e.response.status_code == 403: # cannot create in other workspace dataset = Dataset._get(conn, name, workspace) if dataset is not None: print("set existing Dataset: {} from {}".format(dataset.name, WORKSPACE_PRINT_MSG)) else: # no accessible dataset in other workspace six.raise_from(e, None) elif e.response.status_code == 409: # already exists if any(param is not None for param in (desc, tags, attrs, public_within_org)): warnings.warn( "Dataset with name {} already exists;" " cannot set `desc`, `tags`, `attrs`, or `public_within_org`".format(name) ) dataset = Dataset._get(conn, name, workspace) if dataset is not None: print("set existing Dataset: {} from {}".format(dataset.name, WORKSPACE_PRINT_MSG)) else: raise RuntimeError("unable to retrieve Dataset {};" " please notify the Verta development team".format(name)) else: raise e else: print("created new Dataset: {} in {}".format(dataset.name, WORKSPACE_PRINT_MSG)) # this is available to create versions self._conn = conn self._conf = conf self.id = dataset.id # these could be updated by separate calls self.name = dataset.name self.dataset_type = self.__class__.__name__ self._dataset_type = dataset.dataset_type self.desc = dataset.description self.attrs = dataset.attributes self.tags = dataset.tags def __repr__(self): return "<{} \"{}\">".format(self.__class__.__name__, self.name) @staticmethod def _generate_default_name(): return "Dataset {}".format(_utils.generate_default_name()) @staticmethod def _get(conn, dataset_name=None, workspace=None, _dataset_id=None): if _dataset_id is not None: Message = _DatasetService.GetDatasetById msg = Message(id=_dataset_id) data = _utils.proto_to_json(msg) response = _utils.make_request("GET", "{}://{}/api/v1/modeldb/dataset/getDatasetById".format(conn.scheme, conn.socket), conn, params=data) if response.ok: dataset = _utils.json_to_proto(_utils.body_to_json(response), Message.Response).dataset return dataset else: if response.status_code == 404 and _utils.body_to_json(response)['code'] == 5: return None else: _utils.raise_for_http_error(response) elif dataset_name is not None: Message = 
_DatasetService.GetDatasetByName msg = Message(name=dataset_name, workspace_name=workspace) data = _utils.proto_to_json(msg) response = _utils.make_request("GET", "{}://{}/api/v1/modeldb/dataset/getDatasetByName".format(conn.scheme, conn.socket), conn, params=data) if response.ok: response_json = _utils.body_to_json(response) response_msg = _utils.json_to_proto(response_json, Message.Response) if workspace is None or response_json.get('dataset_by_user'): # user's personal workspace dataset = response_msg.dataset_by_user else: dataset = response_msg.shared_datasets[0] if not dataset.id: # 200, but empty message raise RuntimeError("unable to retrieve Dataset {};" " please notify the Verta development team".format(dataset_name)) return dataset else: if response.status_code == 404 and _utils.body_to_json(response)['code'] == 5: return None else: _utils.raise_for_http_error(response) else: raise ValueError("insufficient arguments") @staticmethod def _create(conn, dataset_name, dataset_type, desc=None, tags=None, attrs=None, workspace=None, public_within_org=None): if tags is not None: tags = _utils.as_list_of_str(tags) if attrs is not None: attrs = [_CommonCommonService.KeyValue(key=key, value=_utils.python_to_val_proto(value, allow_collection=True)) for key, value in six.viewitems(attrs)] Message = _DatasetService.CreateDataset msg = Message(name=dataset_name, dataset_type=dataset_type, description=desc, tags=tags, attributes=attrs, workspace_name=workspace) if public_within_org: if workspace is None: raise ValueError("cannot set `public_within_org` for personal workspace") elif not _utils.is_org(workspace, conn): raise ValueError( "cannot set `public_within_org`" " because workspace \"{}\" is not an organization".format(workspace) ) else: msg.dataset_visibility = _DatasetService.DatasetVisibilityEnum.ORG_SCOPED_PUBLIC data = _utils.proto_to_json(msg) response = _utils.make_request("POST", "{}://{}/api/v1/modeldb/dataset/createDataset".format(conn.scheme, conn.socket), conn, json=data) if response.ok: dataset = _utils.json_to_proto(_utils.body_to_json(response), Message.Response).dataset return dataset else: _utils.raise_for_http_error(response) def create_version(self): raise NotImplementedError('this function must be implemented by subclasses') def get_all_versions(self): """ Gets all the versions for this Dataset. Returns ------- list of DatasetVersions for this Dataset """ return DatasetVersions(self._conn, self._conf).with_dataset(self) # TODO: sorting seems to be incorrect def get_latest_version(self, ascending=None, sort_key=None): """ Gets the latest version for this Dataset. Parameters ---------- ascending : bool Whether to sort by time in ascending order sort_key : str Which key to sort on Returns ------- Returns the latest version of the Dataset as defined by ascending and the sort_key. 
""" Message = _DatasetVersionService.GetLatestDatasetVersionByDatasetId msg = Message(dataset_id=self.id, ascending=ascending, sort_key=sort_key) data = _utils.proto_to_json(msg) response = _utils.make_request("GET", "{}://{}/api/v1/modeldb/dataset-version/getLatestDatasetVersionByDatasetId".format(self._conn.scheme, self._conn.socket), self._conn, params=data) _utils.raise_for_http_error(response) response_msg = _utils.json_to_proto(_utils.body_to_json(response), Message.Response) print("got existing dataset version: {}".format(response_msg.dataset_version.id)) return DatasetVersion(self._conn, self._conf, _dataset_version_id=response_msg.dataset_version.id) class RawDataset(Dataset): TYPE = _DatasetService.DatasetTypeEnum.RAW def __init__(self, *args, **kwargs): super(RawDataset, self).__init__(dataset_type=self.TYPE, *args, **kwargs) class PathDataset(Dataset): TYPE = _DatasetService.DatasetTypeEnum.PATH def __init__(self, *args, **kwargs): super(PathDataset, self).__init__(dataset_type=self.TYPE, *args, **kwargs) class QueryDataset(Dataset): TYPE = _DatasetService.DatasetTypeEnum.QUERY def __init__(self, *args, **kwargs): super(QueryDataset, self).__init__(dataset_type=self.TYPE, *args, **kwargs) class S3Dataset(PathDataset): def create_version(self, bucket_name, key=None, key_prefix=None, url_stub=None, parent_id=None, desc=None, tags=None, attrs=None): """ Create a version of an S3 dataset. Parameters ---------- bucket_name : str Name of the S3 bucketing storing the data. key : str, optional Key of the object in S3. This argument cannot be provided alongside `key_prefix`. key_prefix : str, optional Key prefix to use for snapshotting multiple objects. This argument cannot be provided alongside `key`. url_stub : str, optional Prefix of the S3 URL. parent_id : str Id of the DatasetVersion from which this version was derived. desc : str, optional Description of the DatasetVersion. tags : list of str, optional Tags of the DatasetVersion. attrs : dict of str to {None, bool, float, int, str}, optional Attributes of the DatasetVersion. Returns ------- :class:`~verta._dataset.DatasetVersion` Returns the newly created dataset version """ if key is not None and key_prefix is not None: raise ValueError("cannot specify both `key` and `key_prefix`") version_info = S3DatasetVersionInfo(bucket_name, key=key, key_prefix=key_prefix, url_stub=url_stub) return PathDatasetVersion(self._conn, self._conf, dataset_id=self.id, dataset_type=self.TYPE, dataset_version_info=version_info, parent_id=parent_id, desc=desc, tags=tags, attrs=attrs) class LocalDataset(PathDataset): def create_version(self, path, parent_id=None, desc=None, tags=None, attrs=None): """ Create a version of a Local dataset. Parameters ---------- path : str Path to the local dataset. parent_id : str Id of the DatasetVersion from which this version was derived. desc : str, optional Description of the DatasetVersion. tags : list of str, optional Tags of the DatasetVersion. attrs : dict of str to {None, bool, float, int, str}, optional Attributes of the DatasetVersion. 
        Returns
        -------
        :class:`~verta._dataset.DatasetVersion`
            Returns the newly created dataset version

        """
        version_info = FilesystemDatasetVersionInfo(path)
        return PathDatasetVersion(self._conn, self._conf,
                                  dataset_id=self.id, dataset_type=self.TYPE,
                                  dataset_version_info=version_info, parent_id=parent_id,
                                  desc=desc, tags=tags, attrs=attrs)


class BigQueryDataset(QueryDataset):
    def create_version(self, job_id, location,
                       parent_id=None,
                       desc=None, tags=None, attrs=None):
        """
        Create a version of a Big Query dataset.

        Parameters
        ----------
        job_id : str
            Id of the Big Query job that created this Dataset.
        location : str
            Big Query location parameter.
        parent_id : str
            Id of the DatasetVersion from which this version was derived.
        desc : str, optional
            Description of the DatasetVersion.
        tags : list of str, optional
            Tags of the DatasetVersion.
        attrs : dict of str to {None, bool, float, int, str}, optional
            Attributes of the DatasetVersion.

        Returns
        -------
        :class:`~verta._dataset.DatasetVersion`
            Returns the newly created dataset version

        """
        version_info = BigQueryDatasetVersionInfo(job_id=job_id, location=location)
        return QueryDatasetVersion(self._conn, self._conf,
                                   dataset_id=self.id, dataset_type=self.TYPE,
                                   dataset_version_info=version_info, parent_id=parent_id,
                                   desc=desc, tags=tags, attrs=attrs)


class RDBMSDataset(QueryDataset):
    def create_version(self, query, db_connection_str,
                       execution_timestamp=None, query_template="",
                       query_parameters=[], num_records=0,
                       parent_id=None, desc=None, tags=None, attrs=None):
        """
        Create a version of a generic RDBMS Query Dataset.

        Parameters
        ----------
        query : str
            Query that was executed.
        db_connection_str : str
            Connection string for the DBMS (e.g., localhost:8080).
        execution_timestamp : int or float, optional
            Time at which the query was run. Will default to current time.
        query_template : str, optional
            Template from which the query was derived.
        query_parameters : list of str, optional
            Parameters used with the query template.
        num_records : int, optional
            Number of records returned by the query.
        parent_id : str, optional
            Id of the DatasetVersion from which this version was derived.
        desc : str, optional
            Description of the DatasetVersion.
        tags : list of str, optional
            Tags of the DatasetVersion.
        attrs : dict of str to {None, bool, float, int, str}, optional
            Attributes of the DatasetVersion.

        Returns
        -------
        :class:`~verta._dataset.DatasetVersion`
            Returns the newly created dataset version

        """
        version_info = RDBMSDatasetVersionInfo(
            query=query, db_connection_str=db_connection_str,
            execution_timestamp=execution_timestamp,
            query_template=query_template,
            query_parameters=query_parameters, num_records=num_records)
        return QueryDatasetVersion(self._conn, self._conf,
                                   dataset_id=self.id, dataset_type=self.TYPE,
                                   dataset_version_info=version_info, parent_id=parent_id,
                                   desc=desc, tags=tags, attrs=attrs)


class AtlasHiveDataset(QueryDataset):
    def create_version(self, guid, atlas_url, atlas_user_name, atlas_password,
                       atlas_entity_endpoint="/api/atlas/v2/entity/bulk",
                       parent_id=None, desc=None, tags=None, attrs=None):
        """
        Create a version of an AtlasHive dataset.

        Parameters
        ----------
        guid : str
            GUID of the table being queried.
        atlas_url : str
            URL of the Atlas server.
        atlas_user_name : str
            Username for Atlas.
        atlas_password : str
            Password for Atlas.
        atlas_entity_endpoint : str
            Endpoint for querying Atlas.
        parent_id : str
            Id of the DatasetVersion from which this version was derived.
        desc : str, optional
            Description of the DatasetVersion.
        tags : list of str, optional
            Tags of the DatasetVersion.
attrs : dict of str to {None, bool, float, int, str}, optional Attributes of the DatasetVersion. Returns ------- `DatasetVersion <dataset.html>`_ Returns the newly created dataset version """ version_info = AtlasHiveDatasetVersionInfo( guid=guid, atlas_url=atlas_url, atlas_user_name=atlas_user_name, atlas_password=atlas_password, atlas_entity_endpoint=atlas_entity_endpoint ) return QueryDatasetVersion(self._conn, self._conf, dataset_id=self.id, dataset_type=self.TYPE, dataset_version_info=version_info, parent_id=parent_id, desc=desc, tags=tags or version_info.tags or [], attrs=attrs or version_info.attributes or {}) class DatasetVersion(object): """ A version of a dataset at a particular point in time. Attributes ---------- id : str ID of this dataset version. base_path : str Base path of this dataset version's components. """ # TODO: visibility not done # TODO: delete version not implemented def __init__(self, conn, conf, dataset_id=None, dataset_type=None, dataset_version_info=None, parent_id=None, desc=None, tags=None, attrs=None, version=None, _dataset_version_id=None): if _dataset_version_id is not None: # retrieve dataset version by id dataset_version = DatasetVersion._get(conn, _dataset_version_id) if dataset_version is None: raise ValueError("DatasetVersion with ID {} not found".format(_dataset_version_id)) else: # create new version under dataset if dataset_id is None: raise ValueError('dataset_id must be specified') # create a new dataset version try: dataset_version = DatasetVersion._create( conn, dataset_id, dataset_type, dataset_version_info, parent_id=parent_id, desc=desc, tags=tags, attrs=attrs, version=version ) # TODO: handle dups except requests.HTTPError as e: # if e.response.status_code == 409: # already exists # if any(param is not None for param in (desc, tags, attrs)): # warnings.warn("Dataset with name {} already exists;" # " cannot initialize `desc`, `tags`, or `attrs`".format(dataset_name)) # dataset_version = DatasetVersion._get(conn, dataset_id, version) # else: # raise e raise e else: print("created new DatasetVersion: {}" .format(dataset_version.version)) self._conn = conn self._conf = conf self.dataset_id = dataset_version.dataset_id # this info can be captured via a separate call too self.parent_id = dataset_version.parent_id self.desc = dataset_version.description self.tags = dataset_version.tags self.attrs = dataset_version.attributes self.id = dataset_version.id self.version = dataset_version.version self._dataset_type = dataset_version.dataset_type self.dataset_version = dataset_version version_info_oneof = dataset_version.WhichOneof('dataset_version_info') if version_info_oneof: self.dataset_version_info = getattr(dataset_version, version_info_oneof) else: self.dataset_version_info = None # assign base_path to proto msg to restore a level of backwards-compatibility try: self.dataset_version.path_dataset_version_info.base_path = self.base_path except AttributeError: # unsupported non-path dataset or multiple base_paths pass def __repr__(self): if self.dataset_version: msg_copy = self.dataset_version.__class__() msg_copy.CopyFrom(self.dataset_version) msg_copy.owner = '' # hide owner field return msg_copy.__repr__() elif self.dataset_version_info: msg_copy = self.dataset_version_info.__class__() msg_copy.CopyFrom(self.dataset_version_info) # pylint: disable=no-member return msg_copy.__repr__() # pylint: disable=no-member else: return "<{} \"{}\">".format(self.__class__.__name__, self.version) # TODO: get by dataset_id and version is not supported on the 
backend @staticmethod def _get(conn, _dataset_version_id=None): if _dataset_version_id is not None: Message = _DatasetVersionService.GetDatasetVersionById msg = Message(id=_dataset_version_id) data = _utils.proto_to_json(msg) response = _utils.make_request( "GET", "{}://{}/api/v1/modeldb/dataset-version/getDatasetVersionById".format(conn.scheme, conn.socket), conn, params=data ) if response.ok: dataset_version = _utils.json_to_proto(_utils.body_to_json(response), Message.Response).dataset_version if not dataset_version.id: # 200, but empty message raise RuntimeError("unable to retrieve DatasetVersion {};" " please notify the Verta development team".format(_dataset_version_id)) return dataset_version else: if response.status_code == 404 and _utils.body_to_json(response)['code'] == 5: return None else: _utils.raise_for_http_error(response) else: raise ValueError("insufficient arguments") @staticmethod def _create(conn, dataset_id, dataset_type, dataset_version_info, parent_id=None, desc=None, tags=None, attrs=None, version=None): if tags is not None: tags = _utils.as_list_of_str(tags) if attrs is not None: attrs = [_CommonCommonService.KeyValue(key=key, value=_utils.python_to_val_proto(value, allow_collection=True)) for key, value in six.viewitems(attrs)] if dataset_type == _DatasetService.DatasetTypeEnum.PATH: msg = PathDatasetVersion.make_create_message( dataset_id, dataset_type, dataset_version_info, parent_id=parent_id, desc=desc, tags=tags, attrs=attrs, version=version ) elif dataset_type == _DatasetService.DatasetTypeEnum.QUERY: msg = QueryDatasetVersion.make_create_message( dataset_id, dataset_type, dataset_version_info, parent_id=parent_id, desc=desc, tags=tags, attrs=attrs, version=version ) else: msg = RawDatasetVersion.make_create_message( dataset_id, dataset_type, dataset_version_info, parent_id=parent_id, desc=desc, tags=tags, attrs=attrs, version=version ) data = _utils.proto_to_json(msg) response = _utils.make_request("POST", "{}://{}/api/v1/modeldb/dataset-version/createDatasetVersion".format(conn.scheme, conn.socket), conn, json=data) if response.ok: dataset_version = _utils.json_to_proto(_utils.body_to_json(response), _DatasetVersionService.CreateDatasetVersion.Response).dataset_version return dataset_version else: _utils.raise_for_http_error(response) @staticmethod def make_create_message(dataset_id, dataset_type, dataset_version_info, parent_id=None, desc=None, tags=None, attrs=None, version=None): raise NotImplementedError('this function must be implemented by subclasses') @property def base_path(self): components = self.list_components() base_paths = set(component.base_path for component in components) if len(base_paths) == 1: return base_paths.pop() else: # shouldn't happen: DVs don't have an interface to have different base paths raise AttributeError("multiple base paths among components: {}".format(base_paths)) def list_components(self): """ Returns a list of this dataset version's components. 
Returns ------- list of :class:`~verta.dataset._dataset.Component` """ # there's a circular import if imported at module-level # which I don't fully understand, and even breaks in Python 3 # so this import is deferred to this function body from .dataset import _dataset blob = self.dataset_version.dataset_blob content_oneof = blob.WhichOneof('content') if content_oneof: # determine component type if content_oneof == "s3": component_cls = _dataset.S3Component elif content_oneof == "path": component_cls = _dataset.Component else: # shouldn't happen raise RuntimeError( "found unexpected dataset type {};" " please notify the Verta development team".format(content_oneof) ) # return list of component objects components = getattr(blob, content_oneof).components return list(map(component_cls._from_proto, components)) return [] class RawDatasetVersion(DatasetVersion): def __init__(self, *args, **kwargs): super(RawDatasetVersion, self).__init__(*args, **kwargs) self.dataset_version_info = self.dataset_version.raw_dataset_version_info @staticmethod def make_create_message(dataset_id, dataset_type, dataset_version_info, parent_id=None, desc=None, tags=None, attrs=None, version=None):
version_msg = _DatasetVersionService.RawDatasetVersionInfo converted_dataset_version_info = version_msg( size=dataset_version_info.size, features=dataset_version_info.features, num_records=dataset_version_info.num_records, object_path=dataset_version_info.object_path, checksum=dataset_version_info.checksum ) msg = Message(dataset_id=dataset_id, parent_id=parent_id, description=desc, tags=tags, dataset_type=dataset_type, attributes=attrs, version=version, raw_dataset_version_info=converted_dataset_version_info) return msg class PathDatasetVersion(DatasetVersion): def __init__(self, *args, **kwargs): super(PathDatasetVersion, self).__init__(*args, **kwargs) self.dataset_version_info = self.dataset_version.path_dataset_version_info @staticmethod def make_create_message(dataset_id, dataset_type, dataset_version_info, parent_id=None, desc=None, tags=None, attrs=None, version=None): if tags is not None: tags = _utils.as_list_of_str(tags) Message = _DatasetVersionService.CreateDatasetVersion # turn dataset_version_info into proto format version_msg = _DatasetVersionService.PathDatasetVersionInfo converted_dataset_version_info = version_msg( location_type=dataset_version_info.location_type, size=dataset_version_info.size, dataset_part_infos=dataset_version_info.dataset_part_infos, base_path=dataset_version_info.base_path ) msg = Message(dataset_id=dataset_id, parent_id=parent_id, description=desc, tags=tags, dataset_type=dataset_type, attributes=attrs, version=version, path_dataset_version_info=converted_dataset_version_info) return msg class QueryDatasetVersion(DatasetVersion): def __init__(self, *args, **kwargs): super(QueryDatasetVersion, self).__init__(*args, **kwargs) self.dataset_version_info = self.dataset_version.query_dataset_version_info @staticmethod def make_create_message(dataset_id, dataset_type, dataset_version_info, parent_id=None, desc=None, tags=None, attrs=None, version=None): if tags is not None: tags = _utils.as_list_of_str(tags) Message = _DatasetVersionService.CreateDatasetVersion version_msg = _DatasetVersionService.QueryDatasetVersionInfo converted_dataset_version_info = version_msg( query=dataset_version_info.query, query_template=dataset_version_info.query_template, query_parameters=dataset_version_info.query_parameters, data_source_uri=dataset_version_info.data_source_uri, execution_timestamp=dataset_version_info.execution_timestamp, num_records=dataset_version_info.num_records ) msg = Message(dataset_id=dataset_id, parent_id=parent_id, description=desc, tags=tags, dataset_type=dataset_type, attributes=attrs, version=version, # different dataset versions query_dataset_version_info=converted_dataset_version_info) return msg class PathDatasetVersionInfo(object): def __init__(self): self.dataset_part_infos = [] def compute_dataset_size(self): self.size = 0 for dataset_part_info in self.dataset_part_infos: self.size += dataset_part_info.size def get_dataset_part_infos(self): raise NotImplementedError('Implemented only in subclasses') class FilesystemDatasetVersionInfo(PathDatasetVersionInfo): def __init__(self, path): self.base_path = os.path.abspath(os.path.expanduser(path)) super(FilesystemDatasetVersionInfo, self).__init__() self.location_type = _DatasetVersionService.PathLocationTypeEnum.LOCAL_FILE_SYSTEM self.dataset_part_infos = self.get_dataset_part_infos() self.compute_dataset_size() def get_dataset_part_infos(self): dataset_part_infos = [] # find all files there and create dataset_part_infos if os.path.isdir(self.base_path): dir_infos = os.walk(self.base_path) for 
root, _, filenames in dir_infos: for filename in filenames: dataset_part_infos.append(self.get_file_info(root + "/" + filename)) else: dataset_part_infos.append(self.get_file_info(self.base_path)) # raise NotImplementedError('Only local files or S3 supported') return dataset_part_infos def get_file_info(self, path): dataset_part_info = _DatasetVersionService.DatasetPartInfo() dataset_part_info.path = path dataset_part_info.size = os.path.getsize(path) dataset_part_info.checksum = self.compute_file_hash(path) dataset_part_info.last_modified_at_source = _utils.timestamp_to_ms(int(os.path.getmtime(path))) return dataset_part_info def compute_file_hash(self, path): BLOCKSIZE = 65536 hasher = hashlib.md5() with open(path, 'rb') as afile: buf = afile.read(BLOCKSIZE) while len(buf) > 0: hasher.update(buf) buf = afile.read(BLOCKSIZE) return hasher.hexdigest() class S3DatasetVersionInfo(PathDatasetVersionInfo): def __init__(self, bucket_name, key=None, key_prefix=None, url_stub=None): super(S3DatasetVersionInfo, self).__init__() self.location_type = _DatasetVersionService.PathLocationTypeEnum.S3_FILE_SYSTEM self.bucket_name = bucket_name self.key = key self.key_prefix = key_prefix self.url_stub = url_stub self.base_path = ("" if url_stub is None else url_stub) + bucket_name \ + (("/" + key) if key is not None else "") self.dataset_part_infos = self.get_dataset_part_infos() self.compute_dataset_size() def get_dataset_part_infos(self): boto3 = importer.maybe_dependency("boto3") if boto3 is None: # Boto 3 not installed six.raise_from(ImportError("Boto 3 is not installed; try `pip install boto3`"), None) conn = boto3.client('s3') dataset_part_infos = [] if self.key is not None: # look up object by key obj = conn.head_object(Bucket=self.bucket_name, Key=self.key) self._append_s3_object_info(dataset_part_infos, obj, self.key) elif self.key_prefix is not None: # look up objects by key prefix for obj in conn.list_objects(Bucket=self.bucket_name, Prefix=self.key_prefix)['Contents']: self._append_s3_object_info(dataset_part_infos, obj) else: # look up all objects in bucket for obj in conn.list_objects(Bucket=self.bucket_name)['Contents']: self._append_s3_object_info(dataset_part_infos, obj) return dataset_part_infos def _append_s3_object_info(self, dataset_part_infos, object_info, key=None): if self._is_folder(key or object_info['Key']): # folder, not object return dataset_part_infos.append(self.get_s3_object_info(object_info, key)) @staticmethod def _is_folder(key): return key.endswith('/') @staticmethod def get_s3_object_info(object_info, key=None): # S3 also provides version info that could be used: # https://boto3.amazonaws.com/api/v1/modeldb/documentation/api/latest/reference/services/s3.html dataset_part_info = _DatasetVersionService.DatasetPartInfo() dataset_part_info.path = object_info['Key'] if key is None else key dataset_part_info.size = object_info['Size'] if key is None else object_info['ContentLength'] dataset_part_info.checksum = object_info['ETag'] dataset_part_info.last_modified_at_source = _utils.timestamp_to_ms(_utils.ensure_timestamp(object_info['LastModified'])) return dataset_part_info class QueryDatasetVersionInfo(object): def __init__(self, job_id=None, query="", execution_timestamp="", data_source_uri="", query_template="", query_parameters=[], num_records=0): if not query: raise ValueError("query not found") self.query = query self.execution_timestamp = execution_timestamp self.data_source_uri = data_source_uri self.query_template = query_template self.query_parameters = 
query_parameters
        self.num_records = num_records


class AtlasHiveDatasetVersionInfo(QueryDatasetVersionInfo):
    def __init__(self, guid="", atlas_url="",
                 atlas_user_name="", atlas_password="",
                 atlas_entity_endpoint="/api/atlas/v2/entity/bulk",
                 parent_id=None, desc=None, tags=None, attrs=None):
        if guid and atlas_url:
            self.guid = guid
            atlas_entity_details = self.get_entity_details(guid, atlas_url, atlas_user_name, atlas_password, atlas_entity_endpoint)
            if len(atlas_entity_details['entities']) != 1:
                raise ValueError("Error fetching details of entity from Atlas")
            table_obj = atlas_entity_details['entities'][0]
            if table_obj['typeName'] != 'hive_table':
                raise NotImplementedError("Atlas dataset currently supported only for Hive tables")
            # TODO: what is the execution timestamp? Should the user log this later?
            self.execution_timestamp = _utils.now()
            self.data_source_uri = atlas_url + "/index.html#!/detailPage/" + guid
            self.query = self.generate_query(table_obj)
            # TODO: extract the query template
            self.query_template = self.query
            self.query_parameters = []
            self.num_records = int(table_obj['attributes']['parameters']['numRows'])
            self.attributes = self.get_attributes(table_obj)
            self.tags = self.get_tags(table_obj)
        else:
            super(AtlasHiveDatasetVersionInfo, self).__init__()

    @staticmethod
    def get_tags(table_obj):
        verta_tags = []
        if 'classifications' in table_obj:
            atlas_classifications = table_obj['classifications']
            for atlas_classification in atlas_classifications:
                verta_tags.append(atlas_classification['typeName'])
        return verta_tags

    @staticmethod
    def get_entity_details(guid, atlas_url, atlas_user_name, atlas_password, atlas_entity_endpoint):
        response = requests.get(atlas_url + atlas_entity_endpoint,
                                auth=(atlas_user_name, atlas_password),
                                params={'guid': guid})
        if not response.ok:  # surface HTTP failures instead of a confusing JSON decode error
            _utils.raise_for_http_error(response)
        return _utils.body_to_json(response)

    @staticmethod
    def generate_query(table_obj):
        table_name = table_obj['attributes']['name']  # store as attribute
        database_name = table_obj['relationshipAttributes']['db']['displayText']  # store as attribute
        query = "select * from {}.{}".format(database_name, table_name)
        return query

    @staticmethod
    def get_attributes(table_obj):
        attribute_keyvals = []
        attributes = {}
        attributes['type'] = table_obj['typeName']
        attributes['table_name'] = table_obj['attributes']['name']  # store as attribute
        attributes['database_name'] = table_obj['relationshipAttributes']['db']['displayText']  # store as attribute
        attributes['col_names'] = AtlasHiveDatasetVersionInfo.get_columns(table_obj)
        attributes['created_time'] = table_obj['createTime']
        attributes['update_time'] = table_obj['updateTime']
        attributes['load_queries'] = AtlasHiveDatasetVersionInfo.get_inbound_queries(table_obj)
        # for key, value in six.viewitems(attributes):
        #     attribute_keyvals.append(_CommonCommonService.KeyValue(key=key,
        #                                                            value=_utils.python_to_val_proto(value, allow_collection=True)))
        # return attribute_keyvals
        return attributes

    @staticmethod
    def get_columns(table_obj):
        column_objs = table_obj['relationshipAttributes']['columns']
        col_names = []
        for column_obj in column_objs:
            col_names.append(column_obj['displayText'])
        return col_names

    @staticmethod
    def get_inbound_queries(table_obj):
        verta_input_processes = []
        atlas_input_processes = table_obj['relationshipAttributes']['outputFromProcesses']
        for atlas_input_process in atlas_input_processes:
            verta_input_processes.append(atlas_input_process['displayText'])
        return verta_input_processes


class BigQueryDatasetVersionInfo(QueryDatasetVersionInfo):
    def __init__(self, job_id=None,
                 query="", location="", execution_timestamp="",
data_source_uri="", query_template="", query_parameters=[], num_records=0): """https://googleapis.github.io/google-cloud-python/latest/bigquery/generated/google.cloud.bigquery.job.QueryJob.html#google.cloud.bigquery.job.QueryJob.query_plan""" if job_id is not None and location: self.job_id = job_id job = self.get_bq_job(job_id, location) self.execution_timestamp = _utils.timestamp_to_ms(_utils.ensure_timestamp(job.started)) self.data_source_uri = job.self_link self.query = job.query #TODO: extract the query template self.query_template = job.query self.query_parameters = [] self.num_records = job.to_dataframe().shape[0] else: super(BigQueryDatasetVersionInfo, self).__init__() @staticmethod def get_bq_job(job_id, location): bigquery = importer.maybe_dependency("google.cloud.bigquery") if bigquery is None: # BigQuery not installed six.raise_from(ImportError("BigQuery is not installed;" " try `pip install google-cloud-bigquery`"), None) client = bigquery.Client() return client.get_job(job_id, location=location) class RDBMSDatasetVersionInfo(QueryDatasetVersionInfo): def __init__(self, query="", db_connection_str="", execution_timestamp=None, query_template="", query_parameters=[], num_records=0): if execution_timestamp is None: self.execution_timestamp = _utils.timestamp_to_ms(time.time()) self.data_source_uri = db_connection_str self.query = query self.query_template = query_template self.query_parameters = query_parameters self.num_records = num_records
if tags is not None: tags = _utils.as_list_of_str(tags) Message = _DatasetVersionService.CreateDatasetVersion
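A minimal usage sketch (not part of the library) showing how the filesystem version-info helper defined in this module behaves on its own, with no Verta backend; the temporary-directory setup below is purely illustrative.

# FilesystemDatasetVersionInfo walks the given path, builds one
# DatasetPartInfo proto per file (path, size, MD5 checksum, mtime),
# and sums the per-file sizes into `size`.
import os
import tempfile

tmpdir = tempfile.mkdtemp()  # throwaway directory, illustrative only
with open(os.path.join(tmpdir, "data.csv"), "w") as f:
    f.write("a,b\n1,2\n")

info = FilesystemDatasetVersionInfo(tmpdir)
print(info.base_path)  # absolute, user-expanded root path
print(info.size)       # total bytes across all parts
for part in info.dataset_part_infos:
    print(part.path, part.size, part.checksum)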
text.rs
/* * Hurl (https://hurl.dev) * Copyright (C) 2022 Orange * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ use colored::*; use hurl_core::ast::*; use super::token::*; pub fn
(hurl_file: HurlFile, color: bool) -> String { let mut buffer = String::from(""); for token in hurl_file.tokenize() { buffer.push_str(format_token(token, color).as_str()); } buffer } pub fn format_token(token: Token, color: bool) -> String { match token { Token::Whitespace(value) => value, Token::Method(value) => { if color { value.yellow().to_string() } else { value } } Token::Version(value) => value, Token::Status(value) => value, Token::SectionHeader(value) => { if color { value.magenta().to_string() } else { value } } Token::Comment(value) => { if color { value.bright_black().to_string() } else { value } } Token::Value(value) => value, Token::Colon(value) => value, Token::QueryType(value) => { if color { value.cyan().to_string() } else { value } } Token::PredicateType(value) => { if color { value.yellow().to_string() } else { value } } Token::Not(value) => { if color { value.yellow().to_string() } else { value } } Token::Boolean(value) | Token::Number(value) => { if color { value.cyan().to_string() } else { value } } Token::String(value) => { if color { value.green().to_string() } else { value } } Token::Quote(value) => { if color { value.green().to_string() } else { value } } Token::CodeDelimiter(value) => { if color { value.green().to_string() } else { value } } Token::CodeVariable(value) => { if color { value.green().to_string() } else { value } } Token::Keyword(value) => value, } }
format
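A small sketch of driving the formatter above one token at a time. The Token variants come from super::token and, as the match arms imply, each carries its text as an owned String; with color = false every arm passes the text through untouched, which makes the round trip easy to assert.

#[cfg(test)]
mod sketch {
    use super::*;

    #[test]
    fn plain_formatting_round_trips() {
        let tokens = vec![
            Token::Method("GET".to_string()),
            Token::Whitespace(" ".to_string()),
            Token::Value("https://example.org".to_string()),
        ];
        let mut out = String::new();
        for token in tokens {
            // color = false: format_token returns the raw text unchanged
            out.push_str(format_token(token, false).as_str());
        }
        assert_eq!(out, "GET https://example.org");
    }
}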
value.go
/* Copyright 2017 Google Inc. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package vm import ( "errors" "fmt" "github.com/zenreach/go-jsonnet/ast" ) // value represents a concrete jsonnet value of a specific type. // Various operations on values are allowed, depending on their type. // All values are of course immutable. type value interface { aValue() getType() *valueType } type valueType struct { name string } var stringType = &valueType{"string"} var numberType = &valueType{"number"} var functionType = &valueType{"function"} var objectType = &valueType{"object"} var booleanType = &valueType{"boolean"} var nullType = &valueType{"null"} var arrayType = &valueType{"array"} // potentialValue is something that may be evaluated to a concrete value. // The result of the evaluation may *NOT* depend on the current state // of the interpreter. The evaluation may fail. // // It can be used to represent lazy values (e.g. variables values in jsonnet // are not calculated before they are used). It is also a useful abstraction // in other cases like error handling. // // It may or may not require arbitrary computation when getValue is called the // first time, but any subsequent calls will immediately return. // // TODO(sbarzowski) perhaps call it just "Thunk"? type potentialValue interface { // fromWhere keeps the information from where the evaluation was requested. getValue(i *interpreter, fromWhere TraceElement) (value, error) aPotentialValue() } // A set of variables with associated thunks. type bindingFrame map[ast.Identifier]*cachedThunk type valueBase struct{} func (v *valueBase) aValue() {} // Primitive values // ------------------------------------- // valueString represents a string value, internally using a []rune for quick // indexing. 
type valueString struct { valueBase // We use rune slices instead of strings for quick indexing value []rune } func (s *valueString) index(i *interpreter, trace TraceElement, index int) (value, error) { if 0 <= index && index < s.length() { return makeValueString(string(s.value[index])), nil } return nil, i.Error(fmt.Sprintf("Index %d out of bounds, not within [0, %v)", index, s.length()), trace) } func concatStrings(a, b *valueString) *valueString { result := make([]rune, 0, len(a.value)+len(b.value)) for _, r := range a.value { result = append(result, r) } for _, r := range b.value { result = append(result, r) } return &valueString{value: result} } func stringLessThan(a, b *valueString) bool { var length int if len(a.value) < len(b.value) { length = len(a.value) } else { length = len(b.value) } for i := 0; i < length; i++ { if a.value[i] != b.value[i] { return a.value[i] < b.value[i] } } return len(a.value) < len(b.value) } func stringEqual(a, b *valueString) bool { if len(a.value) != len(b.value) { return false } for i := 0; i < len(a.value); i++ { if a.value[i] != b.value[i] { return false } } return true } func (s *valueString) length() int { return len(s.value) } func (s *valueString) getString() string { return string(s.value) } func makeValueString(v string) *valueString { return &valueString{value: []rune(v)} } func (*valueString) getType() *valueType { return stringType } type valueBoolean struct { valueBase value bool } func (*valueBoolean) getType() *valueType { return booleanType } func makeValueBoolean(v bool) *valueBoolean { return &valueBoolean{value: v} } func (b *valueBoolean) not() *valueBoolean { return makeValueBoolean(!b.value) } type valueNumber struct { valueBase value float64 } func (*valueNumber) getType() *valueType { return numberType } func makeValueNumber(v float64) *valueNumber { return &valueNumber{value: v} } func intToValue(i int) *valueNumber { return makeValueNumber(float64(i)) } func int64ToValue(i int64) *valueNumber { return makeValueNumber(float64(i)) } type valueNull struct { valueBase } var nullValue valueNull func makeValueNull() *valueNull { return &nullValue } func (*valueNull) getType() *valueType { return nullType } // ast.Array // ------------------------------------- type valueArray struct { valueBase elements []*cachedThunk } func (arr *valueArray) index(i *interpreter, trace TraceElement, index int) (value, error) { if 0 <= index && index < arr.length() { return i.evaluatePV(arr.elements[index], trace) } return nil, i.Error(fmt.Sprintf("Index %d out of bounds, not within [0, %v)", index, arr.length()), trace) } func (arr *valueArray) length() int { return len(arr.elements) } func makeValueArray(elements []*cachedThunk) *valueArray { // We don't want to keep a bigger array than necessary // so we create a new one with minimal capacity var arrayElems []*cachedThunk if len(elements) == cap(elements) { arrayElems = elements } else { arrayElems = make([]*cachedThunk, len(elements)) for i := range elements { arrayElems[i] = elements[i] } } return &valueArray{ elements: arrayElems, } } func concatArrays(a, b *valueArray) *valueArray { result := make([]*cachedThunk, 0, len(a.elements)+len(b.elements)) for _, r := range a.elements { result = append(result, r) } for _, r := range b.elements { result = append(result, r) } return &valueArray{elements: result} } func (*valueArray) getType() *valueType { return arrayType } // ast.Function // ------------------------------------- type valueFunction struct { valueBase ec evalCallable } // TODO(sbarzowski) 
better name? type evalCallable interface { EvalCall(args callArguments, i *interpreter, trace TraceElement) (value, error) Parameters() Parameters } func (f *valueFunction) call(i *interpreter, trace TraceElement, args callArguments) (value, error) { err := checkArguments(i, trace, args, f.parameters()) if err != nil { return nil, err } return f.ec.EvalCall(args, i, trace) } func (f *valueFunction) parameters() Parameters { return f.ec.Parameters() } func checkArguments(i *interpreter, trace TraceElement, args callArguments, params Parameters) error { received := make(map[ast.Identifier]bool) accepted := make(map[ast.Identifier]bool) numPassed := len(args.positional) numExpected := len(params.required) + len(params.optional) if numPassed > numExpected { return i.Error(fmt.Sprintf("function expected %v positional argument(s), but got %v", numExpected, numPassed), trace) } for _, param := range params.required { accepted[param] = true } for _, param := range params.optional { accepted[param.name] = true } for i := range args.positional { if i < len(params.required) { received[params.required[i]] = true } else { received[params.optional[i-len(params.required)].name] = true } } for _, arg := range args.named { if _, present := received[arg.name]; present { return i.Error(fmt.Sprintf("Argument %v already provided", arg.name), trace) } if _, present := accepted[arg.name]; !present { return i.Error(fmt.Sprintf("function has no parameter %v", arg.name), trace) } received[arg.name] = true } for _, param := range params.required { if _, present := received[param]; !present { return i.Error(fmt.Sprintf("Missing argument: %v", param), trace) } } return nil } func (f *valueFunction) getType() *valueType { return functionType } // Parameters represents required position and optional named parameters for a // function definition. type Parameters struct { required ast.Identifiers optional []namedParameter } type namedParameter struct { name ast.Identifier defaultArg ast.Node } type potentialValueInEnv interface { inEnv(env *environment) *cachedThunk } type callArguments struct { positional []*cachedThunk named []namedCallArgument tailstrict bool } type namedCallArgument struct { name ast.Identifier pv *cachedThunk } func args(xs ...*cachedThunk) callArguments { return callArguments{positional: xs} } // Objects // ------------------------------------- // Object is a value that allows indexing (taking a value of a field) // and combining through mixin inheritence (operator +). // // Note that every time a field is indexed it evaluates it again, there is // no caching of field values. See: https://github.com/zenreach/go-jsonnet/issues/113 type valueObject interface { value inheritanceSize() int index(i *interpreter, trace TraceElement, field string) (value, error) assertionsChecked() bool setAssertionsCheckResult(err error) getAssertionsCheckResult() error } type selfBinding struct { // self is the lexically nearest object we are in, or nil. Note // that this is not the same as context, because we could be inside a function, // inside an object and then context would be the function, but self would still point // to the object. self valueObject // superDepth is the "super" level of self. Sometimes, we look upwards in the // inheritance tree, e.g. via an explicit use of super, or because a given field // has been inherited. 
When evaluating a field from one of these super objects, // we need to bind self to the concrete object (so self must point // there) but uses of super should be resolved relative to the object whose // field we are evaluating. Thus, we keep a second field for that. This is // usually 0, unless we are evaluating a super object's field. // TODO(sbarzowski) provide some examples // TODO(sbarzowski) provide somewhere a complete explanation of the object model superDepth int } func makeUnboundSelfBinding() selfBinding { return selfBinding{ nil, 123456789, // poison value } } func objectBinding(obj valueObject) selfBinding { return selfBinding{self: obj, superDepth: 0} } func (sb selfBinding) super() selfBinding { return selfBinding{self: sb.self, superDepth: sb.superDepth + 1} } // Hidden represents wether to include hidden fields in a lookup. type Hidden int // With/without hidden fields const ( withHidden Hidden = iota withoutHidden ) func withHiddenFromBool(with bool) Hidden { if with { return withHidden } return withoutHidden } // Hack - we need to distinguish not-checked-yet and no error situations // so we have a special value for no error and nil means that we don't know yet. var errNoErrorInObjectInvariants = errors.New("no error - assertions passed") type valueObjectBase struct { valueBase assertionError error } func (*valueObjectBase) getType() *valueType { return objectType } func (obj *valueObjectBase) assertionsChecked() bool { // nil - not checked yet // errNoErrorInObjectInvariants - we checked and there is no error (or checking in progress) return obj.assertionError != nil } func (obj *valueObjectBase) setAssertionsCheckResult(err error) { if err != nil { obj.assertionError = err } else { obj.assertionError = errNoErrorInObjectInvariants } } func (obj *valueObjectBase) getAssertionsCheckResult() error { if obj.assertionError == nil { panic("Assertions not checked yet") } if obj.assertionError == errNoErrorInObjectInvariants { return nil } return obj.assertionError } // valueSimpleObject represents a flat object (no inheritance). // Note that it can be used as part of extended objects // in inheritance using operator +. // // Fields are late bound (to object), so they are not values or potentialValues. // This is important for inheritance, for example: // Let a = {x: 42} and b = {y: self.x}. Evaluating b.y is an error, // but (a+b).y evaluates to 42. type valueSimpleObject struct { valueObjectBase upValues bindingFrame fields simpleObjectFieldMap asserts []unboundField } func checkAssertionsHelper(i *interpreter, trace TraceElement, obj valueObject, curr valueObject, superDepth int) error { switch curr := curr.(type) { case *valueExtendedObject: err := checkAssertionsHelper(i, trace, obj, curr.right, superDepth) if err != nil { return err } err = checkAssertionsHelper(i, trace, obj, curr.left, superDepth+curr.right.inheritanceSize()) if err != nil { return err } return nil case *valueSimpleObject: for _, assert := range curr.asserts { sb := selfBinding{self: obj, superDepth: superDepth} _, err := assert.evaluate(i, trace, sb, curr.upValues, "") if err != nil { return err } } return nil default: panic(fmt.Sprintf("Unknown object type %#v", obj)) } } func checkAssertions(i *interpreter, trace TraceElement, obj valueObject) error { if !obj.assertionsChecked() { // Assertions may refer to the object that will normally // trigger checking of assertions, resulting in an endless recursion. // To avoid that, while we check them, we treat them as already passed. 
obj.setAssertionsCheckResult(errNoErrorInObjectInvariants)
		obj.setAssertionsCheckResult(checkAssertionsHelper(i, trace, obj, obj, 0))
	}
	return obj.getAssertionsCheckResult()
}

func (o *valueSimpleObject) index(i *interpreter, trace TraceElement, field string) (value, error) {
	return objectIndex(i, trace, objectBinding(o), field)
}

func (*valueSimpleObject) inheritanceSize() int {
	return 1
}

func makeValueSimpleObject(b bindingFrame, fields simpleObjectFieldMap, asserts []unboundField) *valueSimpleObject {
	return &valueSimpleObject{
		upValues: b,
		fields:   fields,
		asserts:  asserts,
	}
}

type simpleObjectFieldMap map[string]simpleObjectField

type simpleObjectField struct {
	hide  ast.ObjectFieldHide
	field unboundField
}

// unboundField is a field that doesn't know yet in which object it is.
type unboundField interface {
	evaluate(i *interpreter, trace TraceElement, sb selfBinding, origBinding bindingFrame, fieldName string) (value, error)
}

// valueExtendedObject represents an object created through inheritance (left + right).
// We represent it as the pair of objects. This results in a tree-like structure.
// Example:
// (A + B) + C
//
//        +
//       / \
//      +   C
//     / \
//    A   B
//
// It is possible to create an arbitrary binary tree.
// Note however, that because + is associative the only thing that matters
// is the order of leaves.
//
// This representation allows us to implement "+" in O(1),
// but requires going through the tree and trying subsequent leaves for field access.
//
type valueExtendedObject struct {
	valueObjectBase
	left, right          valueObject
	totalInheritanceSize int
}

func (o *valueExtendedObject) index(i *interpreter, trace TraceElement, field string) (value, error) {
	return objectIndex(i, trace, objectBinding(o), field)
}

func (o *valueExtendedObject) inheritanceSize() int {
	return o.totalInheritanceSize
}

func makeValueExtendedObject(left, right valueObject) *valueExtendedObject {
	return &valueExtendedObject{
		left:                 left,
		right:                right,
		totalInheritanceSize: left.inheritanceSize() + right.inheritanceSize(),
	}
}

// findField returns a field in object curr, with superDepth at least minSuperDepth.
// It also returns an associated bindingFrame and actual superDepth that the field
// was found at.
func findField(curr value, minSuperDepth int, f string) (bool, simpleObjectField, bindingFrame, int) { switch curr := curr.(type) { case *valueExtendedObject: if curr.right.inheritanceSize() > minSuperDepth { found, field, frame, counter := findField(curr.right, minSuperDepth, f) if found { return true, field, frame, counter } } found, field, frame, counter := findField(curr.left, minSuperDepth-curr.right.inheritanceSize(), f) return found, field, frame, counter + curr.right.inheritanceSize() case *valueSimpleObject: if minSuperDepth <= 0 { if field, ok := curr.fields[f]; ok { return true, field, curr.upValues, 0 } } return false, simpleObjectField{}, nil, 0 default: panic(fmt.Sprintf("Unknown object type %#v", curr)) } } func objectIndex(i *interpreter, trace TraceElement, sb selfBinding, fieldName string) (value, error) { err := checkAssertions(i, trace, sb.self) if err != nil { return nil, err } if sb.superDepth >= sb.self.inheritanceSize() { return nil, i.Error("Attempt to use super when there is no super class.", trace) } found, field, upValues, foundAt := findField(sb.self, sb.superDepth, fieldName) if !found { return nil, i.Error(fmt.Sprintf("Field does not exist: %s", fieldName), trace) } fieldSelfBinding := selfBinding{self: sb.self, superDepth: foundAt} return field.field.evaluate(i, trace, fieldSelfBinding, upValues, fieldName) } func objectHasField(sb selfBinding, fieldName string, h Hidden) bool { found, field, _, _ := findField(sb.self, sb.superDepth, fieldName) if !found || (h == withoutHidden && field.hide == ast.ObjectFieldHidden) { return false } return true } type fieldHideMap map[string]ast.ObjectFieldHide func objectFieldsVisibility(obj valueObject) fieldHideMap
// Returns field names of an object. Gotcha: the order of fields is unpredictable. func objectFields(obj valueObject, h Hidden) []string { var r []string for fieldName, hide := range objectFieldsVisibility(obj) { if h == withHidden || hide != ast.ObjectFieldHidden { r = append(r, fieldName) } } return r } func duplicateFieldNameErrMsg(fieldName string) string { return fmt.Sprintf("Duplicate field name: %s", unparseString(fieldName)) }
{ r := make(fieldHideMap) switch obj := obj.(type) { case *valueExtendedObject: r = objectFieldsVisibility(obj.left) rightMap := objectFieldsVisibility(obj.right) for k, v := range rightMap { if v == ast.ObjectFieldInherit { if _, alreadyExists := r[k]; !alreadyExists { r[k] = v } } else { r[k] = v } } return r case *valueSimpleObject: for fieldName, field := range obj.fields { r[fieldName] = field.hide } } return r }
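A hypothetical same-package sketch exercising the primitive string-value helpers defined earlier in this file; nothing here touches the interpreter, so it runs standalone.

// demoStringValues shows the valueString helpers in isolation:
// comparison is rune-wise lexicographic with length as the tiebreaker,
// and concatenation pre-sizes the rune slice so it allocates once.
func demoStringValues() {
	a := makeValueString("abc")
	b := makeValueString("abd")

	// 'c' < 'd' decides the comparison before length is consulted.
	fmt.Println(stringLessThan(a, b))                   // true
	fmt.Println(stringEqual(a, makeValueString("abc"))) // true

	c := concatStrings(a, b)
	fmt.Println(c.getString(), c.length()) // "abcabd 6"
}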
test_cli.py
from macaque import cli
def test_cli_template(): assert cli.cli() is None
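The template test above only checks that cli.cli() returns None. If macaque's entry point were a click command (an assumption — nothing in the excerpt says so), click's CliRunner would let the test assert on exit codes and captured output instead:

from click.testing import CliRunner

def test_cli_exits_cleanly():
    # Hypothetical: assumes cli.cli is a click command object.
    runner = CliRunner()
    result = runner.invoke(cli.cli, [])
    assert result.exit_code == 0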
InfClassObject.py
## @file # This file is used to define each component of INF file # # Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR> # This program and the accompanying materials # are licensed and made available under the terms and conditions of the BSD License # which accompanies this distribution. The full text of the license may be found at # http://opensource.org/licenses/bsd-license.php # # THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, # WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. # ## # Import Modules # import Common.LongFilePathOs as os import re import EdkLogger from CommonDataClass.CommonClass import LibraryClassClass from CommonDataClass.ModuleClass import * from String import * from DataType import * from Identification import * from Dictionary import * from BuildToolError import * from Misc import sdict import GlobalData from Table.TableInf import TableInf import Database from Parsing import * from Common.LongFilePathSupport import OpenLongFilePath as open # # Global variable # Section = {TAB_UNKNOWN.upper() : MODEL_UNKNOWN, TAB_INF_DEFINES.upper() : MODEL_META_DATA_HEADER, TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION, TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE, TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE, TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS, TAB_PACKAGES.upper() : MODEL_META_DATA_PACKAGE, TAB_NMAKE.upper() : MODEL_META_DATA_NMAKE, TAB_INF_FIXED_PCD.upper() : MODEL_PCD_FIXED_AT_BUILD, TAB_INF_PATCH_PCD.upper() : MODEL_PCD_PATCHABLE_IN_MODULE, TAB_INF_FEATURE_PCD.upper() : MODEL_PCD_FEATURE_FLAG, TAB_INF_PCD_EX.upper() : MODEL_PCD_DYNAMIC_EX, TAB_INF_PCD.upper() : MODEL_PCD_DYNAMIC, TAB_SOURCES.upper() : MODEL_EFI_SOURCE_FILE, TAB_GUIDS.upper() : MODEL_EFI_GUID, TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL, TAB_PPIS.upper() : MODEL_EFI_PPI, TAB_DEPEX.upper() : MODEL_EFI_DEPEX, TAB_BINARIES.upper() : MODEL_EFI_BINARY_FILE, TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION } gComponentType2ModuleType = {
"SECURITY_CORE" : "SEC", "PEI_CORE" : "PEI_CORE", "COMBINED_PEIM_DRIVER" : "PEIM", "PIC_PEIM" : "PEIM", "RELOCATABLE_PEIM" : "PEIM", "PE32_PEIM" : "PEIM", "BS_DRIVER" : "DXE_DRIVER", "RT_DRIVER" : "DXE_RUNTIME_DRIVER", "SAL_RT_DRIVER" : "DXE_SAL_DRIVER", "APPLICATION" : "UEFI_APPLICATION", "LOGO" : "BASE", } gNmakeFlagPattern = re.compile("(?:EBC_)?([A-Z]+)_(?:STD_|PROJ_|ARCH_)?FLAGS(?:_DLL|_ASL|_EXE)?", re.UNICODE) gNmakeFlagName2ToolCode = { "C" : "CC", "LIB" : "SLINK", "LINK" : "DLINK", } class InfHeader(ModuleHeaderClass): _Mapping_ = { # # Required Fields # TAB_INF_DEFINES_BASE_NAME : "Name", TAB_INF_DEFINES_FILE_GUID : "Guid", TAB_INF_DEFINES_MODULE_TYPE : "ModuleType", TAB_INF_DEFINES_EFI_SPECIFICATION_VERSION : "UefiSpecificationVersion", TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION : "UefiSpecificationVersion", TAB_INF_DEFINES_EDK_RELEASE_VERSION : "EdkReleaseVersion", # # Optional Fields # TAB_INF_DEFINES_INF_VERSION : "InfVersion", TAB_INF_DEFINES_BINARY_MODULE : "BinaryModule", TAB_INF_DEFINES_COMPONENT_TYPE : "ComponentType", TAB_INF_DEFINES_MAKEFILE_NAME : "MakefileName", TAB_INF_DEFINES_BUILD_NUMBER : "BuildNumber", TAB_INF_DEFINES_BUILD_TYPE : "BuildType", TAB_INF_DEFINES_FFS_EXT : "FfsExt", TAB_INF_DEFINES_FV_EXT : "FvExt", TAB_INF_DEFINES_SOURCE_FV : "SourceFv", TAB_INF_DEFINES_VERSION_NUMBER : "VersionNumber", TAB_INF_DEFINES_VERSION_STRING : "VersionString", TAB_INF_DEFINES_VERSION : "Version", TAB_INF_DEFINES_PCD_IS_DRIVER : "PcdIsDriver", TAB_INF_DEFINES_TIANO_EDK_FLASHMAP_H : "TianoEdkFlashMap_h", TAB_INF_DEFINES_SHADOW : "Shadow", # TAB_INF_DEFINES_LIBRARY_CLASS : "LibraryClass", # TAB_INF_DEFINES_ENTRY_POINT : "ExternImages", # TAB_INF_DEFINES_UNLOAD_IMAGE : "ExternImages", # TAB_INF_DEFINES_CONSTRUCTOR : , # TAB_INF_DEFINES_DESTRUCTOR : , # TAB_INF_DEFINES_DEFINE : "Define", # TAB_INF_DEFINES_SPEC : "Specification", # TAB_INF_DEFINES_CUSTOM_MAKEFILE : "CustomMakefile", # TAB_INF_DEFINES_MACRO : } def __init__(self): ModuleHeaderClass.__init__(self) self.VersionNumber = '' self.VersionString = '' #print self.__dict__ def __setitem__(self, key, value): self.__dict__[self._Mapping_[key]] = value def __getitem__(self, key): return self.__dict__[self._Mapping_[key]] ## "in" test support def __contains__(self, key): return key in self._Mapping_ ## InfObject # # This class defined basic Inf object which is used by inheriting # # @param object: Inherited from object class # class InfObject(object): def __init__(self): object.__init__() ## Inf # # This class defined the structure used in Inf object # # @param InfObject: Inherited from InfObject class # @param Ffilename: Input value for Ffilename of Inf file, default is None # @param IsMergeAllArches: Input value for IsMergeAllArches # True is to merge all arches # Fales is not to merge all arches # default is False # @param IsToModule: Input value for IsToModule # True is to transfer to ModuleObject automatically # False is not to transfer to ModuleObject automatically # default is False # @param WorkspaceDir: Input value for current workspace directory, default is None # # @var Identification: To store value for Identification, it is a structure as Identification # @var UserExtensions: To store value for UserExtensions # @var Module: To store value for Module, it is a structure as ModuleClass # @var WorkspaceDir: To store value for WorkspaceDir # @var KeyList: To store value for KeyList, a list for all Keys used in Inf # class Inf(InfObject): def __init__(self, Filename=None, IsToDatabase=False, IsToModule=False, 
WorkspaceDir=None, Database=None, SupArchList=DataType.ARCH_LIST): self.Identification = Identification() self.Module = ModuleClass() self.UserExtensions = '' self.WorkspaceDir = WorkspaceDir self.SupArchList = SupArchList self.IsToDatabase = IsToDatabase self.Cur = Database.Cur self.TblFile = Database.TblFile self.TblInf = Database.TblInf self.FileID = -1 #self.TblInf = TableInf(Database.Cur) self.KeyList = [ TAB_SOURCES, TAB_BUILD_OPTIONS, TAB_BINARIES, TAB_INCLUDES, TAB_GUIDS, TAB_PROTOCOLS, TAB_PPIS, TAB_LIBRARY_CLASSES, TAB_PACKAGES, TAB_LIBRARIES, TAB_INF_FIXED_PCD, TAB_INF_PATCH_PCD, TAB_INF_FEATURE_PCD, TAB_INF_PCD, TAB_INF_PCD_EX, TAB_DEPEX, TAB_NMAKE, TAB_INF_DEFINES ] # # Upper all KEYs to ignore case sensitive when parsing # self.KeyList = map(lambda c: c.upper(), self.KeyList) # # Init RecordSet # self.RecordSet = {} for Key in self.KeyList: self.RecordSet[Section[Key]] = [] # # Load Inf file if filename is not None # if Filename != None: self.LoadInfFile(Filename) # # Transfer to Module Object if IsToModule is True # if IsToModule: self.InfToModule() ## Transfer to Module Object # # Transfer all contents of an Inf file to a standard Module Object # def InfToModule(self): # # Init global information for the file # ContainerFile = self.Identification.FileFullPath # # Generate Package Header # self.GenModuleHeader(ContainerFile) # # Generate BuildOptions # self.GenBuildOptions(ContainerFile) # # Generate Includes # self.GenIncludes(ContainerFile) # # Generate Libraries # self.GenLibraries(ContainerFile) # # Generate LibraryClasses # self.GenLibraryClasses(ContainerFile) # # Generate Packages # self.GenPackages(ContainerFile) # # Generate Nmakes # self.GenNmakes(ContainerFile) # # Generate Pcds # self.GenPcds(ContainerFile) # # Generate Sources # self.GenSources(ContainerFile) # # Generate UserExtensions # self.GenUserExtensions(ContainerFile) # # Generate Guids # self.GenGuidProtocolPpis(DataType.TAB_GUIDS, ContainerFile) # # Generate Protocols # self.GenGuidProtocolPpis(DataType.TAB_PROTOCOLS, ContainerFile) # # Generate Ppis # self.GenGuidProtocolPpis(DataType.TAB_PPIS, ContainerFile) # # Generate Depexes # self.GenDepexes(ContainerFile) # # Generate Binaries # self.GenBinaries(ContainerFile) ## Parse [Defines] section # # Parse [Defines] section into InfDefines object # # @param InfFile The path of the INF file # @param Section The title of "Defines" section # @param Lines The content of "Defines" section # def ParseDefines(self, InfFile, Section, Lines): TokenList = Section.split(TAB_SPLIT) if len(TokenList) == 3: RaiseParserError(Section, "Defines", InfFile, "[xx.yy.%s] format (with platform) is not supported") if len(TokenList) == 2: Arch = TokenList[1].upper() else: Arch = TAB_ARCH_COMMON if Arch not in self.Defines: self.Defines[Arch] = InfDefines() GetSingleValueOfKeyFromLines(Lines, self.Defines[Arch].DefinesDictionary, TAB_COMMENT_SPLIT, TAB_EQUAL_SPLIT, False, None) ## Load Inf file # # Load the file if it exists # # @param Filename: Input value for filename of Inf file # def LoadInfFile(self, Filename): # # Insert a record for file # Filename = NormPath(Filename) self.Identification.FileFullPath = Filename (self.Identification.FileRelativePath, self.Identification.FileName) = os.path.split(Filename) self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_INF) # # Init InfTable # #self.TblInf.Table = "Inf%s" % self.FileID #self.TblInf.Create() # # Init common datas # IfDefList, SectionItemList, CurrentSection, ArchList, ThirdList, IncludeFiles = \ [], [], 
TAB_UNKNOWN, [], [], [] LineNo = 0 # # Parse file content # IsFindBlockComment = False ReservedLine = '' for Line in open(Filename, 'r'): LineNo = LineNo + 1 # # Remove comment block # if Line.find(TAB_COMMENT_EDK_START) > -1: ReservedLine = GetSplitList(Line, TAB_COMMENT_EDK_START, 1)[0] IsFindBlockComment = True if Line.find(TAB_COMMENT_EDK_END) > -1: Line = ReservedLine + GetSplitList(Line, TAB_COMMENT_EDK_END, 1)[1] ReservedLine = '' IsFindBlockComment = False if IsFindBlockComment: continue # # Remove comments at tail and remove spaces again # Line = CleanString(Line) if Line == '': continue # # Find a new section tab # First insert previous section items # And then parse the content of the new section # if Line.startswith(TAB_SECTION_START) and Line.endswith(TAB_SECTION_END): if Line[1:3] == "--": continue Model = Section[CurrentSection.upper()] # # Insert items data of previous section # InsertSectionItemsIntoDatabase(self.TblInf, self.FileID, Filename, Model, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList, self.RecordSet) # # Parse the new section # SectionItemList = [] ArchList = [] ThirdList = [] CurrentSection = '' LineList = GetSplitValueList(Line[len(TAB_SECTION_START):len(Line) - len(TAB_SECTION_END)], TAB_COMMA_SPLIT) for Item in LineList: ItemList = GetSplitValueList(Item, TAB_SPLIT) if CurrentSection == '': CurrentSection = ItemList[0] else: if CurrentSection != ItemList[0]: EdkLogger.error("Parser", PARSER_ERROR, "Different section names '%s' and '%s' are found in one section definition, this is not allowed." % (CurrentSection, ItemList[0]), File=Filename, Line=LineNo, RaiseError=EdkLogger.IsRaiseError) if CurrentSection.upper() not in self.KeyList: RaiseParserError(Line, CurrentSection, Filename, '', LineNo) CurrentSection = TAB_UNKNOWN continue ItemList.append('') ItemList.append('') if len(ItemList) > 5: RaiseParserError(Line, CurrentSection, Filename, '', LineNo) else: if ItemList[1] != '' and ItemList[1].upper() not in ARCH_LIST_FULL: EdkLogger.error("Parser", PARSER_ERROR, "Invalid Arch definition '%s' found" % ItemList[1], File=Filename, Line=LineNo, RaiseError=EdkLogger.IsRaiseError) ArchList.append(ItemList[1].upper()) ThirdList.append(ItemList[2]) continue # # Not in any defined section # if CurrentSection == TAB_UNKNOWN: ErrorMsg = "%s is not in any defined section" % Line EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, File=Filename, Line=LineNo, RaiseError=EdkLogger.IsRaiseError) # # Add a section item # SectionItemList.append([Line, LineNo]) # End of parse #End of For # # Insert items data of last section # Model = Section[CurrentSection.upper()] InsertSectionItemsIntoDatabase(self.TblInf, self.FileID, Filename, Model, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList, self.RecordSet) # # Replace all DEFINE macros with its actual values # ParseDefineMacro2(self.TblInf, self.RecordSet, GlobalData.gGlobalDefines) ## Show detailed information of Module # # Print all members and their values of Module class # def ShowModule(self): M = self.Module for Arch in M.Header.keys(): print '\nArch =', Arch print 'Filename =', M.Header[Arch].FileName print 'FullPath =', M.Header[Arch].FullPath print 'BaseName =', M.Header[Arch].Name print 'Guid =', M.Header[Arch].Guid print 'Version =', M.Header[Arch].Version print 'InfVersion =', M.Header[Arch].InfVersion print 'UefiSpecificationVersion =', M.Header[Arch].UefiSpecificationVersion print 'EdkReleaseVersion =', M.Header[Arch].EdkReleaseVersion print 'ModuleType =', M.Header[Arch].ModuleType 
print 'BinaryModule =', M.Header[Arch].BinaryModule print 'ComponentType =', M.Header[Arch].ComponentType print 'MakefileName =', M.Header[Arch].MakefileName print 'BuildNumber =', M.Header[Arch].BuildNumber print 'BuildType =', M.Header[Arch].BuildType print 'FfsExt =', M.Header[Arch].FfsExt print 'FvExt =', M.Header[Arch].FvExt print 'SourceFv =', M.Header[Arch].SourceFv print 'PcdIsDriver =', M.Header[Arch].PcdIsDriver print 'TianoEdkFlashMap_h =', M.Header[Arch].TianoEdkFlashMap_h print 'Shadow =', M.Header[Arch].Shadow print 'LibraryClass =', M.Header[Arch].LibraryClass for Item in M.Header[Arch].LibraryClass: print Item.LibraryClass, DataType.TAB_VALUE_SPLIT.join(Item.SupModuleList) print 'CustomMakefile =', M.Header[Arch].CustomMakefile print 'Define =', M.Header[Arch].Define print 'Specification =', M.Header[Arch].Specification for Item in self.Module.ExternImages: print '\nEntry_Point = %s, UnloadImage = %s' % (Item.ModuleEntryPoint, Item.ModuleUnloadImage) for Item in self.Module.ExternLibraries: print 'Constructor = %s, Destructor = %s' % (Item.Constructor, Item.Destructor) print '\nBuildOptions =', M.BuildOptions for Item in M.BuildOptions: print Item.ToolChainFamily, Item.ToolChain, Item.Option, Item.SupArchList print '\nIncludes =', M.Includes for Item in M.Includes: print Item.FilePath, Item.SupArchList print '\nLibraries =', M.Libraries for Item in M.Libraries: print Item.Library, Item.SupArchList print '\nLibraryClasses =', M.LibraryClasses for Item in M.LibraryClasses: print Item.LibraryClass, Item.RecommendedInstance, Item.FeatureFlag, Item.SupModuleList, Item.SupArchList, Item.Define print '\nPackageDependencies =', M.PackageDependencies for Item in M.PackageDependencies: print Item.FilePath, Item.SupArchList, Item.FeatureFlag print '\nNmake =', M.Nmake for Item in M.Nmake: print Item.Name, Item.Value, Item.SupArchList print '\nPcds =', M.PcdCodes for Item in M.PcdCodes: print '\tCName=', Item.CName, 'TokenSpaceGuidCName=', Item.TokenSpaceGuidCName, 'DefaultValue=', Item.DefaultValue, 'ItemType=', Item.ItemType, Item.SupArchList print '\nSources =', M.Sources for Source in M.Sources: print Source.SourceFile, 'Fam=', Source.ToolChainFamily, 'Pcd=', Source.FeatureFlag, 'Tag=', Source.TagName, 'ToolCode=', Source.ToolCode, Source.SupArchList print '\nUserExtensions =', M.UserExtensions for UserExtension in M.UserExtensions: print UserExtension.UserID, UserExtension.Identifier, UserExtension.Content print '\nGuids =', M.Guids for Item in M.Guids: print Item.CName, Item.SupArchList, Item.FeatureFlag print '\nProtocols =', M.Protocols for Item in M.Protocols: print Item.CName, Item.SupArchList, Item.FeatureFlag print '\nPpis =', M.Ppis for Item in M.Ppis: print Item.CName, Item.SupArchList, Item.FeatureFlag print '\nDepex =', M.Depex for Item in M.Depex: print Item.Depex, Item.SupArchList, Item.Define print '\nBinaries =', M.Binaries for Binary in M.Binaries: print 'Type=', Binary.FileType, 'Target=', Binary.Target, 'Name=', Binary.BinaryFile, 'FeatureFlag=', Binary.FeatureFlag, 'SupArchList=', Binary.SupArchList ## Convert [Defines] section content to ModuleHeaderClass # # Convert [Defines] section content to ModuleHeaderClass # # @param Defines The content under [Defines] section # @param ModuleHeader An object of ModuleHeaderClass # @param Arch The supported ARCH # def GenModuleHeader(self, ContainerFile): EdkLogger.debug(2, "Generate ModuleHeader ...") File = self.Identification.FileFullPath # # Update all defines item in database # RecordSet = 
self.RecordSet[MODEL_META_DATA_HEADER] for Record in RecordSet: ValueList = GetSplitValueList(Record[0], TAB_EQUAL_SPLIT) if len(ValueList) != 2: RaiseParserError(Record[0], 'Defines', ContainerFile, '<Key> = <Value>', Record[2]) ID, Value1, Value2, Arch, LineNo = Record[3], ValueList[0], ValueList[1], Record[1], Record[2] SqlCommand = """update %s set Value1 = '%s', Value2 = '%s' where ID = %s""" % (self.TblInf.Table, ConvertToSqlString2(Value1), ConvertToSqlString2(Value2), ID) self.TblInf.Exec(SqlCommand) for Arch in DataType.ARCH_LIST: ModuleHeader = InfHeader() ModuleHeader.FileName = self.Identification.FileName ModuleHeader.FullPath = self.Identification.FileFullPath DefineList = QueryDefinesItem2(self.TblInf, Arch, self.FileID) NotProcessedDefineList = [] for D in DefineList: if D[0] in ModuleHeader: ModuleHeader[D[0]] = GetSplitValueList(D[1])[0] else: NotProcessedDefineList.append(D) if ModuleHeader.ComponentType == "LIBRARY": Lib = LibraryClassClass() Lib.LibraryClass = ModuleHeader.Name Lib.SupModuleList = DataType.SUP_MODULE_LIST ModuleHeader.LibraryClass.append(Lib) # we need to make some key defines resolved first for D in NotProcessedDefineList: if D[0] == TAB_INF_DEFINES_LIBRARY_CLASS: List = GetSplitValueList(D[1], DataType.TAB_VALUE_SPLIT, 1) Lib = LibraryClassClass() Lib.LibraryClass = CleanString(List[0]) if len(List) == 1: Lib.SupModuleList = DataType.SUP_MODULE_LIST elif len(List) == 2: Lib.SupModuleList = GetSplitValueList(CleanString(List[1]), ' ') ModuleHeader.LibraryClass.append(Lib) elif D[0] == TAB_INF_DEFINES_CUSTOM_MAKEFILE: List = D[1].split(DataType.TAB_VALUE_SPLIT) if len(List) == 2: ModuleHeader.CustomMakefile[CleanString(List[0])] = CleanString(List[1]) else: RaiseParserError(D[1], 'CUSTOM_MAKEFILE of Defines', File, 'CUSTOM_MAKEFILE=<Family>|<Filename>', D[2]) elif D[0] == TAB_INF_DEFINES_ENTRY_POINT: Image = ModuleExternImageClass() Image.ModuleEntryPoint = CleanString(D[1]) self.Module.ExternImages.append(Image) elif D[0] == TAB_INF_DEFINES_UNLOAD_IMAGE: Image = ModuleExternImageClass() Image.ModuleUnloadImage = CleanString(D[1]) self.Module.ExternImages.append(Image) elif D[0] == TAB_INF_DEFINES_CONSTRUCTOR: LibraryClass = ModuleExternLibraryClass() LibraryClass.Constructor = CleanString(D[1]) self.Module.ExternLibraries.append(LibraryClass) elif D[0] == TAB_INF_DEFINES_DESTRUCTOR: LibraryClass = ModuleExternLibraryClass() LibraryClass.Destructor = CleanString(D[1]) self.Module.ExternLibraries.append(LibraryClass) elif D[0] == TAB_INF_DEFINES_DEFINE: List = D[1].split(DataType.TAB_EQUAL_SPLIT) if len(List) != 2: RaiseParserError(Item, 'DEFINE of Defines', File, 'DEFINE <Word> = <Word>', D[2]) else: ModuleHeader.Define[CleanString(List[0])] = CleanString(List[1]) elif D[0] == TAB_INF_DEFINES_SPEC: List = D[1].split(DataType.TAB_EQUAL_SPLIT) if len(List) != 2: RaiseParserError(Item, 'SPEC of Defines', File, 'SPEC <Word> = <Version>', D[2]) else: ModuleHeader.Specification[CleanString(List[0])] = CleanString(List[1]) # # Get version of INF # if ModuleHeader.InfVersion != "": # EdkII inf VersionNumber = ModuleHeader.VersionNumber VersionString = ModuleHeader.VersionString if len(VersionNumber) > 0 and len(VersionString) == 0: EdkLogger.warn(2000, 'VERSION_NUMBER depricated; INF file %s should be modified to use VERSION_STRING instead.' 
% self.Identification.FileFullPath) ModuleHeader.Version = VersionNumber if len(VersionString) > 0: if len(VersionNumber) > 0: EdkLogger.warn(2001, 'INF file %s defines both VERSION_NUMBER and VERSION_STRING, using VERSION_STRING' % self.Identification.FileFullPath) ModuleHeader.Version = VersionString else: # Edk inf ModuleHeader.InfVersion = "0x00010000" if ModuleHeader.ComponentType in gComponentType2ModuleType: ModuleHeader.ModuleType = gComponentType2ModuleType[ModuleHeader.ComponentType] elif ModuleHeader.ComponentType != '': EdkLogger.error("Parser", PARSER_ERROR, "Unsupported Edk component type [%s]" % ModuleHeader.ComponentType, ExtraData=File, RaiseError=EdkLogger.IsRaiseError) self.Module.Header[Arch] = ModuleHeader ## GenBuildOptions # # Gen BuildOptions of Inf # [<Family>:]<ToolFlag>=Flag # # @param ContainerFile: The Inf file full path # def GenBuildOptions(self, ContainerFile): EdkLogger.debug(2, "Generate %s ..." % TAB_BUILD_OPTIONS) BuildOptions = {} # # Get all BuildOptions # RecordSet = self.RecordSet[MODEL_META_DATA_BUILD_OPTION] # # Go through each arch # for Arch in self.SupArchList: for Record in RecordSet: if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON: (Family, ToolChain, Flag) = GetBuildOption(Record[0], ContainerFile, Record[2]) MergeArches(BuildOptions, (Family, ToolChain, Flag), Arch) # # Update to Database # if self.IsToDatabase: SqlCommand = """update %s set Value1 = '%s', Value2 = '%s', Value3 = '%s' where ID = %s""" % (self.TblInf.Table, ConvertToSqlString2(Family), ConvertToSqlString2(ToolChain), ConvertToSqlString2(Flag), Record[3]) self.TblInf.Exec(SqlCommand) for Key in BuildOptions.keys(): BuildOption = BuildOptionClass(Key[0], Key[1], Key[2]) BuildOption.SupArchList = BuildOptions[Key] self.Module.BuildOptions.append(BuildOption) ## GenIncludes # # Gen Includes of Inf # # # @param ContainerFile: The Inf file full path # def GenIncludes(self, ContainerFile): EdkLogger.debug(2, "Generate %s ..." % TAB_INCLUDES) Includes = sdict() # # Get all Includes # RecordSet = self.RecordSet[MODEL_EFI_INCLUDE] # # Go through each arch # for Arch in self.SupArchList: for Record in RecordSet: if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON: MergeArches(Includes, Record[0], Arch) for Key in Includes.keys(): Include = IncludeClass() Include.FilePath = NormPath(Key) Include.SupArchList = Includes[Key] self.Module.Includes.append(Include) ## GenLibraries # # Gen Libraries of Inf # # # @param ContainerFile: The Inf file full path # def GenLibraries(self, ContainerFile): EdkLogger.debug(2, "Generate %s ..." % TAB_LIBRARIES) Libraries = sdict() # # Get all Includes # RecordSet = self.RecordSet[MODEL_EFI_LIBRARY_INSTANCE] # # Go through each arch # for Arch in self.SupArchList: for Record in RecordSet: if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON: MergeArches(Libraries, Record[0], Arch) for Key in Libraries.keys(): Library = ModuleLibraryClass() # replace macro and remove file extension Library.Library = Key.rsplit('.', 1)[0] Library.SupArchList = Libraries[Key] self.Module.Libraries.append(Library) ## GenLibraryClasses # # Get LibraryClass of Inf # <LibraryClassKeyWord>|<LibraryInstance> # # @param ContainerFile: The Inf file full path # def GenLibraryClasses(self, ContainerFile): EdkLogger.debug(2, "Generate %s ..." 
% TAB_LIBRARY_CLASSES) LibraryClasses = {} # # Get all LibraryClasses # RecordSet = self.RecordSet[MODEL_EFI_LIBRARY_CLASS] # # Go through each arch # for Arch in self.SupArchList: for Record in RecordSet: if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON: (LibClassName, LibClassIns, Pcd, SupModelList) = GetLibraryClassOfInf([Record[0], Record[4]], ContainerFile, self.WorkspaceDir, Record[2]) MergeArches(LibraryClasses, (LibClassName, LibClassIns, Pcd, SupModelList), Arch) # # Update to Database # if self.IsToDatabase: SqlCommand = """update %s set Value1 = '%s', Value2 = '%s', Value3 = '%s' where ID = %s""" % (self.TblInf.Table, ConvertToSqlString2(LibClassName), ConvertToSqlString2(LibClassIns), ConvertToSqlString2(SupModelList), Record[3]) self.TblInf.Exec(SqlCommand) for Key in LibraryClasses.keys(): KeyList = Key[0].split(DataType.TAB_VALUE_SPLIT) LibraryClass = LibraryClassClass() LibraryClass.LibraryClass = Key[0] LibraryClass.RecommendedInstance = NormPath(Key[1]) LibraryClass.FeatureFlag = Key[2] LibraryClass.SupArchList = LibraryClasses[Key] LibraryClass.SupModuleList = GetSplitValueList(Key[3]) self.Module.LibraryClasses.append(LibraryClass) ## GenPackages # # Gen Packages of Inf # # # @param ContainerFile: The Inf file full path # def GenPackages(self, ContainerFile): EdkLogger.debug(2, "Generate %s ..." % TAB_PACKAGES) Packages = {} # # Get all Packages # RecordSet = self.RecordSet[MODEL_META_DATA_PACKAGE] # # Go through each arch # for Arch in self.SupArchList: for Record in RecordSet: if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON: (Package, Pcd) = GetPackage(Record[0], ContainerFile, self.WorkspaceDir, Record[2]) MergeArches(Packages, (Package, Pcd), Arch) if self.IsToDatabase: SqlCommand = """update %s set Value1 = '%s', Value2 = '%s' where ID = %s""" % (self.TblInf.Table, ConvertToSqlString2(Package), ConvertToSqlString2(Pcd), Record[3]) self.TblInf.Exec(SqlCommand) for Key in Packages.keys(): Package = ModulePackageDependencyClass() Package.FilePath = NormPath(Key[0]) Package.SupArchList = Packages[Key] Package.FeatureFlag = Key[1] self.Module.PackageDependencies.append(Package) ## GenNmakes # # Gen Nmakes of Inf # # # @param ContainerFile: The Inf file full path # def GenNmakes(self, ContainerFile): EdkLogger.debug(2, "Generate %s ..." 
% TAB_NMAKE) Nmakes = sdict() # # Get all Nmakes # RecordSet = self.RecordSet[MODEL_META_DATA_NMAKE] # # Go through each arch # for Arch in self.SupArchList: for Record in RecordSet: if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON: MergeArches(Nmakes, Record[0], Arch) for Key in Nmakes.keys(): List = GetSplitValueList(Key, DataType.TAB_EQUAL_SPLIT, MaxSplit=1) if len(List) != 2: RaiseParserError(Key, 'Nmake', ContainerFile, '<MacroName> = <Value>') continue Nmake = ModuleNmakeClass() Nmake.Name = List[0] Nmake.Value = List[1] Nmake.SupArchList = Nmakes[Key] self.Module.Nmake.append(Nmake) # convert Edk format to EdkII format if Nmake.Name == "IMAGE_ENTRY_POINT": Image = ModuleExternImageClass() Image.ModuleEntryPoint = Nmake.Value self.Module.ExternImages.append(Image) elif Nmake.Name == "DPX_SOURCE": Source = ModuleSourceFileClass(NormPath(Nmake.Value), "", "", "", "", Nmake.SupArchList) self.Module.Sources.append(Source) else: ToolList = gNmakeFlagPattern.findall(Nmake.Name) if len(ToolList) == 0 or len(ToolList) != 1: EdkLogger.warn("\nParser", "Don't know how to do with MACRO: %s" % Nmake.Name, ExtraData=ContainerFile) else: if ToolList[0] in gNmakeFlagName2ToolCode: Tool = gNmakeFlagName2ToolCode[ToolList[0]] else: Tool = ToolList[0] BuildOption = BuildOptionClass("MSFT", "*_*_*_%s_FLAGS" % Tool, Nmake.Value) BuildOption.SupArchList = Nmake.SupArchList self.Module.BuildOptions.append(BuildOption) ## GenPcds # # Gen Pcds of Inf # <TokenSpaceGuidCName>.<PcdCName>[|<Value>] # # @param ContainerFile: The Dec file full path # def GenPcds(self, ContainerFile): EdkLogger.debug(2, "Generate %s ..." % TAB_PCDS) Pcds = {} PcdToken = {} # # Get all Guids # RecordSet1 = self.RecordSet[MODEL_PCD_FIXED_AT_BUILD] RecordSet2 = self.RecordSet[MODEL_PCD_PATCHABLE_IN_MODULE] RecordSet3 = self.RecordSet[MODEL_PCD_FEATURE_FLAG] RecordSet4 = self.RecordSet[MODEL_PCD_DYNAMIC_EX] RecordSet5 = self.RecordSet[MODEL_PCD_DYNAMIC] # # Go through each arch # for Arch in self.SupArchList: for Record in RecordSet1: if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON: if self.Module.Header[Arch].LibraryClass != {}: pass (TokenGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_FIXED_AT_BUILD, ContainerFile, Record[2]) MergeArches(Pcds, (TokenGuidCName, TokenName, Value, Type), Arch) PcdToken[Record[3]] = (TokenGuidCName, TokenName) for Record in RecordSet2: if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON: (TokenGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_PATCHABLE_IN_MODULE, ContainerFile, Record[2]) MergeArches(Pcds, (TokenGuidCName, TokenName, Value, Type), Arch) PcdToken[Record[3]] = (TokenGuidCName, TokenName) for Record in RecordSet3: if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON: (TokenGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_FEATURE_FLAG, ContainerFile, Record[2]) MergeArches(Pcds, (TokenGuidCName, TokenName, Value, Type), Arch) PcdToken[Record[3]] = (TokenGuidCName, TokenName) for Record in RecordSet4: if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON: (TokenGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_DYNAMIC_EX, ContainerFile, Record[2]) MergeArches(Pcds, (TokenGuidCName, TokenName, Value, Type), Arch) PcdToken[Record[3]] = (TokenGuidCName, TokenName) for Record in RecordSet5: if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON: (TokenGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], "", ContainerFile, Record[2]) MergeArches(Pcds, (TokenGuidCName, TokenName, Value, Type), Arch) 
PcdToken[Record[3]] = (TokenGuidCName, TokenName) # # Update to database # if self.IsToDatabase: for Key in PcdToken.keys(): SqlCommand = """update %s set Value2 = '%s' where ID = %s""" % (self.TblInf.Table, ".".join((PcdToken[Key][0], PcdToken[Key][1])), Key) self.TblInf.Exec(SqlCommand) for Key in Pcds.keys(): Pcd = PcdClass() Pcd.CName = Key[1] Pcd.TokenSpaceGuidCName = Key[0] Pcd.DefaultValue = Key[2] Pcd.ItemType = Key[3] Pcd.SupArchList = Pcds[Key] self.Module.PcdCodes.append(Pcd) ## GenSources # # Gen Sources of Inf # <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]] # # @param ContainerFile: The Dec file full path # def GenSources(self, ContainerFile): EdkLogger.debug(2, "Generate %s ..." % TAB_SOURCES) Sources = {} # # Get all Nmakes # RecordSet = self.RecordSet[MODEL_EFI_SOURCE_FILE] # # Go through each arch # for Arch in self.SupArchList: for Record in RecordSet: if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON: (Filename, Family, TagName, ToolCode, Pcd) = GetSource(Record[0], ContainerFile, self.Identification.FileRelativePath, Record[2]) MergeArches(Sources, (Filename, Family, TagName, ToolCode, Pcd), Arch) if self.IsToDatabase: SqlCommand = """update %s set Value1 = '%s', Value2 = '%s', Value3 = '%s', Value4 = '%s', Value5 = '%s' where ID = %s""" % (self.TblInf.Table, ConvertToSqlString2(Filename), ConvertToSqlString2(Family), ConvertToSqlString2(TagName), ConvertToSqlString2(ToolCode), ConvertToSqlString2(Pcd), Record[3]) self.TblInf.Exec(SqlCommand) for Key in Sources.keys(): Source = ModuleSourceFileClass(Key[0], Key[2], Key[3], Key[1], Key[4], Sources[Key]) self.Module.Sources.append(Source) ## GenUserExtensions # # Gen UserExtensions of Inf # def GenUserExtensions(self, ContainerFile): # # # # UserExtensions # # # if self.UserExtensions != '': # UserExtension = UserExtensionsClass() # Lines = self.UserExtensions.splitlines() # List = GetSplitValueList(Lines[0], DataType.TAB_SPLIT, 2) # if len(List) != 3: # RaiseParserError(Lines[0], 'UserExtensions', File, "UserExtensions.UserId.'Identifier'") # else: # UserExtension.UserID = List[1] # UserExtension.Identifier = List[2][0:-1].replace("'", '').replace('\"', '') # for Line in Lines[1:]: # UserExtension.Content = UserExtension.Content + CleanString(Line) + '\n' # self.Module.UserExtensions.append(UserExtension) pass ## GenDepexes # # Gen Depex of Inf # # @param ContainerFile: The Inf file full path # def GenDepexes(self, ContainerFile): EdkLogger.debug(2, "Generate %s ..." % TAB_DEPEX) Depex = {} # # Get all Depexes # RecordSet = self.RecordSet[MODEL_EFI_DEPEX] # # Go through each arch # for Arch in self.SupArchList: Line = '' for Record in RecordSet: if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON: Line = Line + Record[0] + ' ' if Line != '': MergeArches(Depex, Line, Arch) for Key in Depex.keys(): Dep = ModuleDepexClass() Dep.Depex = Key Dep.SupArchList = Depex[Key] self.Module.Depex.append(Dep) ## GenBinaries # # Gen Binary of Inf # <FileType>|<Filename>|<Target>[|<TokenSpaceGuidCName>.<PcdCName>] # # @param ContainerFile: The Dec file full path # def GenBinaries(self, ContainerFile): EdkLogger.debug(2, "Generate %s ..." 
% TAB_BINARIES) Binaries = {} # # Get all Guids # RecordSet = self.RecordSet[MODEL_EFI_BINARY_FILE] # # Go through each arch # for Arch in self.SupArchList: for Record in RecordSet: if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON: (FileType, Filename, Target, Pcd) = GetBinary(Record[0], ContainerFile, self.Identification.FileRelativePath, Record[2]) MergeArches(Binaries, (FileType, Filename, Target, Pcd), Arch) if self.IsToDatabase: SqlCommand = """update %s set Value1 = '%s', Value2 = '%s', Value3 = '%s', Value4 = '%s' where ID = %s""" % (self.TblInf.Table, ConvertToSqlString2(FileType), ConvertToSqlString2(Filename), ConvertToSqlString2(Target), ConvertToSqlString2(Pcd), Record[3]) self.TblInf.Exec(SqlCommand) for Key in Binaries.keys(): Binary = ModuleBinaryFileClass(NormPath(Key[1]), Key[0], Key[2], Key[3], Binaries[Key]) self.Module.Binaries.append(Binary) ## GenGuids # # Gen Guids of Inf # <CName>=<GuidValue> # # @param ContainerFile: The Inf file full path # def GenGuidProtocolPpis(self, Type, ContainerFile): EdkLogger.debug(2, "Generate %s ..." % Type) Lists = {} # # Get all Items # RecordSet = self.RecordSet[Section[Type.upper()]] # # Go through each arch # for Arch in self.SupArchList: for Record in RecordSet: if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON: (Name, Value) = GetGuidsProtocolsPpisOfInf(Record[0], Type, ContainerFile, Record[2]) MergeArches(Lists, (Name, Value), Arch) if self.IsToDatabase: SqlCommand = """update %s set Value1 = '%s', Value2 = '%s' where ID = %s""" % (self.TblInf.Table, ConvertToSqlString2(Name), ConvertToSqlString2(Value), Record[3]) self.TblInf.Exec(SqlCommand) ListMember = None if Type == TAB_GUIDS: ListMember = self.Module.Guids elif Type == TAB_PROTOCOLS: ListMember = self.Module.Protocols elif Type == TAB_PPIS: ListMember = self.Module.Ppis for Key in Lists.keys(): ListClass = GuidProtocolPpiCommonClass() ListClass.CName = Key[0] ListClass.SupArchList = Lists[Key] ListClass.FeatureFlag = Key[1] ListMember.append(ListClass) ## # # This acts like the main() function for the script, unless it is 'import'ed into another # script. # if __name__ == '__main__': EdkLogger.Initialize() EdkLogger.SetLevel(EdkLogger.DEBUG_0) W = os.getenv('WORKSPACE') F = os.path.join(W, 'MdeModulePkg/Application/HelloWorld/HelloWorld.inf') Db = Database.Database('Inf.db') Db.InitDatabase() P = Inf(os.path.normpath(F), True, True, W, Db) P.ShowModule() Db.Close()
"LIBRARY" : "BASE",
buffer.go
package magic

import (
	"crypto/rand"
	"encoding/binary"
	"io"
	"sync"
)

const maxConnection = 16

// bufSize is the per-block payload capacity: 64 KiB minus the 8-byte
// (BlockId, Size) header that dataBlock.Pack prepends.
const bufSize = 64*1024 - 8

// tableSize bounds the BlockId sequence space that joinBlocks reorders over.
const tableSize = 65536

// GlobalBufferTable maps a random 16-byte session key to its buffer node.
type GlobalBufferTable map[[16]byte]*bufferNode

type bufferNode struct {
	Chan        chan dataBlock
	WG          sync.WaitGroup
	ExitSignals []chan bool
	Lock        sync.Mutex
}

func makeBufferNode() bufferNode
type dataBlock struct {
	Data    []byte
	Size    uint32
	BlockId uint32
}

// Pack serializes the block as an 8-byte little-endian header (BlockId,
// then Size) followed by the payload.
func (b dataBlock) Pack() []byte {
	packedData := make([]byte, 8+b.Size)
	binary.LittleEndian.PutUint32(packedData[0:4], b.BlockId)
	binary.LittleEndian.PutUint32(packedData[4:8], b.Size)
	copy(packedData[8:], b.Data)
	return packedData
}

// New allocates a buffer node under a fresh random 16-byte key and returns
// the key, retrying on the (unlikely) event of a collision.
func (gbt *GlobalBufferTable) New() [16]byte {
	var key [16]byte
	for {
		// A failed read from crypto/rand would leave a predictable key,
		// so treat it as fatal rather than silently ignoring the error.
		if _, err := io.ReadFull(rand.Reader, key[:]); err != nil {
			panic(err)
		}
		if _, exist := (*gbt)[key]; !exist {
			node := makeBufferNode()
			(*gbt)[key] = &node
			return key
		}
	}
}

// Free removes the node for key; deleting a missing key is a no-op, so no
// existence check is needed.
func (gbt *GlobalBufferTable) Free(key [16]byte) {
	delete(*gbt, key)
}

// joinBlocks reorders blocks arriving on inData by BlockId (modulo
// tableSize) and emits them on outData in sequence. Sending true on
// exitSignal aborts immediately; sending false drains any still-buffered
// input first and then reports completion on taskFinish.
func joinBlocks(inData, outData chan dataBlock, exitSignal, taskFinish chan bool) {
	table := make(map[uint32]dataBlock)
	var pointer uint32
	for {
		select {
		case db := <-inData:
			table[db.BlockId%tableSize] = db
			if pointer != db.BlockId%tableSize {
				continue
			}
			// Flush the run of consecutive blocks starting at pointer.
			for {
				if d, exist := table[pointer]; exist {
					outData <- d
					delete(table, pointer)
					pointer = (pointer + 1) % tableSize
					continue
				}
				break
			}
		case s := <-exitSignal:
			if s {
				return
			}
			for {
				select {
				case db := <-inData:
					table[db.BlockId%tableSize] = db
					if pointer != db.BlockId%tableSize {
						continue
					}
					for {
						if d, exist := table[pointer]; exist {
							outData <- d
							delete(table, pointer)
							pointer = (pointer + 1) % tableSize
							continue
						}
						break
					}
				default:
					taskFinish <- true
					return
				}
			}
		}
	}
}

// blockJoiner starts a joinBlocks goroutine and returns its channels:
// input blocks, in-order output, exit signal, and finish signal.
func blockJoiner() (chan dataBlock, chan dataBlock, chan bool, chan bool) {
	dataBlocks := make(chan dataBlock, maxConnection*2)
	continuousData := make(chan dataBlock, maxConnection*2)
	exitJoinBlock := make(chan bool, 2)
	finishSignal := make(chan bool, 2)
	go joinBlocks(dataBlocks, continuousData, exitJoinBlock, finishSignal)
	return dataBlocks, continuousData, exitJoinBlock, finishSignal
}
{
	// sync.WaitGroup and sync.Mutex are ready to use at their zero values;
	// copying declared locals into the composite literal trips `go vet`'s
	// copylocks check, so initialize the struct fields in place instead.
	return bufferNode{
		Chan:        make(chan dataBlock, maxConnection*2),
		ExitSignals: make([]chan bool, 0),
	}
}
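A short test-style sketch of the wire layout produced by dataBlock.Pack above; the file and test names are hypothetical, but the 8-byte little-endian header (BlockId at bytes [0:4], Size at [4:8], payload after) follows directly from the code:

// buffer_example_test.go (hypothetical, same package)
package magic

import (
	"encoding/binary"
	"testing"
)

func TestPackHeaderLayout(t *testing.T) {
	db := dataBlock{Data: []byte("hello"), Size: 5, BlockId: 42}
	p := db.Pack()
	if binary.LittleEndian.Uint32(p[0:4]) != 42 || binary.LittleEndian.Uint32(p[4:8]) != 5 {
		t.Fatalf("unexpected header: % x", p[:8])
	}
	if string(p[8:]) != "hello" {
		t.Fatalf("unexpected payload: %q", p[8:])
	}
}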
identityconfig.go
/* Copyright SecureKey Technologies Inc. All Rights Reserved. SPDX-License-Identifier: Apache-2.0 */ package msp import ( "strconv" "strings" "github.com/pkg/errors" "regexp" "io/ioutil" "app/service/fabric-sdk-go-gm/pkg/common/providers/core" "app/service/fabric-sdk-go-gm/pkg/common/providers/msp" "app/service/fabric-sdk-go-gm/pkg/core/config/endpoint" "app/service/fabric-sdk-go-gm/pkg/core/config/lookup" logApi "app/service/fabric-sdk-go-gm/pkg/core/logging/api" fabImpl "app/service/fabric-sdk-go-gm/pkg/fab" "app/service/fabric-sdk-go-gm/pkg/util/pathvar" ) var defaultCAServerSchema = "https" var defaultCAServerListenPort = 7054 //ConfigFromBackend returns identity config implementation of given backend func ConfigFromBackend(coreBackend ...core.ConfigBackend) (msp.IdentityConfig, error) { //create identity config config := &IdentityConfig{backend: lookup.New(coreBackend...)} //preload config identities err := config.loadIdentityConfigEntities() if err != nil { return nil, errors.WithMessage(err, "failed to create identity config from backends") } return config, nil } // IdentityConfig represents the identity configuration for the client type IdentityConfig struct { client *msp.ClientConfig caConfigs map[string]*msp.CAConfig backend *lookup.ConfigLookup caKeyStorePath string credentialStorePath string caMatchers []matcherEntry } //entityMatchers for identity configuration type entityMatchers struct { matchers map[string][]MatchConfig } //matcher entry mapping regex to match config type matcherEntry struct { regex *regexp.Regexp matchConfig MatchConfig } //identityConfigEntity contains all config definitions needed type identityConfigEntity struct { Client ClientConfig Organizations map[string]fabImpl.OrganizationConfig CertificateAuthorities map[string]CAConfig } // ClientConfig defines client configuration in identity config type ClientConfig struct { Organization string Logging logApi.LoggingType CryptoConfig msp.CCType TLSCerts ClientTLSConfig CredentialStore msp.CredentialStoreType } //ClientTLSConfig defines client TLS configuration in identity config type ClientTLSConfig struct { //Client TLS information Client endpoint.TLSKeyPair } // CAConfig defines a CA configuration in identity config type CAConfig struct { ID string URL string GRPCOptions map[string]interface{} TLSCACerts endpoint.MutualTLSConfig Registrar msp.EnrollCredentials CAName string } // MatchConfig contains match pattern and substitution pattern // for pattern matching of network configured hostnames or channel names with static config type MatchConfig struct { Pattern string // these are used for hostname mapping URLSubstitutionExp string SSLTargetOverrideURLSubstitutionExp string MappedHost string // this is used for Name mapping instead of hostname mappings MappedName string //IgnoreEndpoint option to exclude given entity from any kind of search or from entity list IgnoreEndpoint bool } // Client returns the Client config func (c *IdentityConfig) Client() *msp.ClientConfig { return c.client } // CAConfig returns the CA configuration. func (c *IdentityConfig) CAConfig(caID string) (*msp.CAConfig, bool) { cfg, ok := c.caConfigs[strings.ToLower(caID)] return cfg, ok } //CAClientCert read configuration for the fabric CA client cert bytes for given org func (c *IdentityConfig) CAClientCert(caID string) ([]byte, bool) { cfg, ok := c.caConfigs[strings.ToLower(caID)] if ok { //for now, we're only loading the first Cert Authority by default. 
return cfg.TLSCAClientCert, true } return nil, false } //CAClientKey read configuration for the fabric CA client key bytes for given org func (c *IdentityConfig) CAClientKey(caID string) ([]byte, bool) { cfg, ok := c.caConfigs[strings.ToLower(caID)] if ok { //for now, we're only loading the first Cert Authority by default. return cfg.TLSCAClientKey, true } return nil, false } // CAServerCerts Read configuration option for the server certificates // will send a list of cert bytes for given org func (c *IdentityConfig) CAServerCerts(caID string) ([][]byte, bool) { cfg, ok := c.caConfigs[strings.ToLower(caID)] if ok { //for now, we're only loading the first Cert Authority by default. return cfg.TLSCAServerCerts, true } return nil, false } // CAKeyStorePath returns the same path as KeyStorePath() without the // 'keystore' directory added. This is done because the fabric-ca-client // adds this to the path func (c *IdentityConfig) CAKeyStorePath() string { return c.caKeyStorePath } // CredentialStorePath returns the user store path func (c *IdentityConfig) CredentialStorePath() string { return c.credentialStorePath
//loadIdentityConfigEntities loads config entities and dictionaries for searches func (c *IdentityConfig) loadIdentityConfigEntities() error { configEntity := identityConfigEntity{} err := c.backend.UnmarshalKey("client", &configEntity.Client) logger.Debugf("Client is: %+v", configEntity.Client) if err != nil { return errors.WithMessage(err, "failed to parse 'client' config item to identityConfigEntity.Client type") } err = c.backend.UnmarshalKey("organizations", &configEntity.Organizations) logger.Debugf("organizations are: %+v", configEntity.Organizations) if err != nil { return errors.WithMessage(err, "failed to parse 'organizations' config item to identityConfigEntity.Organizations type") } err = c.backend.UnmarshalKey("certificateAuthorities", &configEntity.CertificateAuthorities) logger.Debugf("certificateAuthorities are: %+v", configEntity.CertificateAuthorities) if err != nil { return errors.WithMessage(err, "failed to parse 'certificateAuthorities' config item to identityConfigEntity.CertificateAuthorities type") } // Populate ID from the lookup keys for caID := range configEntity.CertificateAuthorities { ca := configEntity.CertificateAuthorities[caID] ca.ID = caID configEntity.CertificateAuthorities[caID] = ca } //compile CA matchers err = c.compileMatchers() if err != nil { return errors.WithMessage(err, "failed to compile certificate authority matchers") } err = c.loadClientTLSConfig(&configEntity) if err != nil { return errors.WithMessage(err, "failed to load client TLSConfig ") } err = c.loadCATLSConfig(&configEntity) if err != nil { return errors.WithMessage(err, "failed to load CA TLSConfig ") } err = c.loadAllCAConfigs(&configEntity) if err != nil { return errors.WithMessage(err, "failed to load all CA configs ") } c.caKeyStorePath = pathvar.Subst(c.backend.GetString("client.credentialStore.cryptoStore.path")) c.credentialStorePath = pathvar.Subst(c.backend.GetString("client.credentialStore.path")) return nil } //loadClientTLSConfig pre-loads all TLSConfig bytes in client config func (c *IdentityConfig) loadClientTLSConfig(configEntity *identityConfigEntity) error { //Clients Config //resolve paths and org name configEntity.Client.Organization = strings.ToLower(configEntity.Client.Organization) configEntity.Client.TLSCerts.Client.Key.Path = pathvar.Subst(configEntity.Client.TLSCerts.Client.Key.Path) configEntity.Client.TLSCerts.Client.Cert.Path = pathvar.Subst(configEntity.Client.TLSCerts.Client.Cert.Path) //pre load client key and cert bytes err := configEntity.Client.TLSCerts.Client.Key.LoadBytes() if err != nil { return errors.WithMessage(err, "failed to load client key") } err = configEntity.Client.TLSCerts.Client.Cert.LoadBytes() if err != nil { return errors.WithMessage(err, "failed to load client cert") } c.client = &msp.ClientConfig{ Organization: configEntity.Client.Organization, Logging: configEntity.Client.Logging, CryptoConfig: configEntity.Client.CryptoConfig, CredentialStore: configEntity.Client.CredentialStore, TLSKey: configEntity.Client.TLSCerts.Client.Key.Bytes(), TLSCert: configEntity.Client.TLSCerts.Client.Cert.Bytes(), } return nil } //loadCATLSConfig pre-loads all TLSConfig bytes in certificate authorities func (c *IdentityConfig) loadCATLSConfig(configEntity *identityConfigEntity) error { //CA Config for ca, caConfig := range configEntity.CertificateAuthorities { //resolve paths caConfig.TLSCACerts.Path = pathvar.Subst(caConfig.TLSCACerts.Path) caConfig.TLSCACerts.Client.Key.Path = pathvar.Subst(caConfig.TLSCACerts.Client.Key.Path) 
caConfig.TLSCACerts.Client.Cert.Path = pathvar.Subst(caConfig.TLSCACerts.Client.Cert.Path) //pre load key and cert bytes err := caConfig.TLSCACerts.Client.Key.LoadBytes() if err != nil { return errors.WithMessage(err, "failed to load ca key") } err = caConfig.TLSCACerts.Client.Cert.LoadBytes() if err != nil { return errors.WithMessage(err, "failed to load ca cert") } configEntity.CertificateAuthorities[ca] = caConfig } return nil } func (c *IdentityConfig) loadAllCAConfigs(configEntity *identityConfigEntity) error { configs := make(map[string]*msp.CAConfig) for caID := range configEntity.CertificateAuthorities { matchedCaConfig, ok := c.tryMatchingCAConfig(configEntity, strings.ToLower(caID)) if !ok { continue } logger.Debugf("Mapped Certificate Authority [%s]", caID) mspCAConfig, err := c.getMSPCAConfig(matchedCaConfig) if err != nil { return err } configs[strings.ToLower(caID)] = mspCAConfig } c.caConfigs = configs return nil } func (c *IdentityConfig) getMSPCAConfig(caConfig *CAConfig) (*msp.CAConfig, error) { serverCerts, err := c.getServerCerts(caConfig) if err != nil { return nil, err } var URL string if caConfig.URL == "" { URL = defaultCAServerSchema + "://" + caConfig.ID + ":" + strconv.Itoa(defaultCAServerListenPort) } else { URL = caConfig.URL } return &msp.CAConfig{ ID: caConfig.ID, URL: URL, GRPCOptions: caConfig.GRPCOptions, Registrar: caConfig.Registrar, CAName: caConfig.CAName, TLSCAClientCert: caConfig.TLSCACerts.Client.Cert.Bytes(), TLSCAClientKey: caConfig.TLSCACerts.Client.Key.Bytes(), TLSCAServerCerts: serverCerts, }, nil } func (c *IdentityConfig) getServerCerts(caConfig *CAConfig) ([][]byte, error) { var serverCerts [][]byte //check for pems first pems := caConfig.TLSCACerts.Pem if len(pems) > 0 { serverCerts = make([][]byte, len(pems)) for i, pem := range pems { serverCerts[i] = []byte(pem) } return serverCerts, nil } //check for files if pems not found certFiles := strings.Split(caConfig.TLSCACerts.Path, ",") serverCerts = make([][]byte, len(certFiles)) for i, certPath := range certFiles { bytes, err := ioutil.ReadFile(pathvar.Subst(certPath)) if err != nil { return nil, errors.WithMessage(err, "failed to load server certs") } serverCerts[i] = bytes } return serverCerts, nil } func (c *IdentityConfig) compileMatchers() error { entMatchers := entityMatchers{} err := c.backend.UnmarshalKey("entityMatchers", &entMatchers.matchers) logger.Debugf("Matchers are: %+v", entMatchers) if err != nil { return errors.WithMessage(err, "failed to parse 'entMatchers' config item") } caMatcherConfigs := entMatchers.matchers["certificateauthority"] c.caMatchers = make([]matcherEntry, len(caMatcherConfigs)) if len(caMatcherConfigs) > 0 { for i, v := range caMatcherConfigs { regex, err := regexp.Compile(v.Pattern) if err != nil { return err } c.caMatchers[i] = matcherEntry{regex: regex, matchConfig: v} } } return nil } func (c *IdentityConfig) tryMatchingCAConfig(configEntity *identityConfigEntity, caID string) (*CAConfig, bool) { //loop over certAuthorityEntityMatchers to find the matching CA Config for _, matcher := range c.caMatchers { if matcher.regex.MatchString(caID) { return c.findMatchingCAConfig(configEntity, caID, matcher) } } //Direct lookup, if no caMatchers are configured or no matcher matched caConfig, ok := configEntity.CertificateAuthorities[strings.ToLower(caID)] if !ok { return nil, false } if caConfig.GRPCOptions == nil { caConfig.GRPCOptions = make(map[string]interface{}) } return &caConfig, true } func (c *IdentityConfig) findMatchingCAConfig(configEntity 
*identityConfigEntity, caID string, matcher matcherEntry) (*CAConfig, bool) { if matcher.matchConfig.IgnoreEndpoint { logger.Debugf("Ignoring CA `%s` since entity matcher 'IgnoreEndpoint' flag is on", caID) return nil, false } mappedHost := matcher.matchConfig.MappedHost if strings.Contains(mappedHost, "$") { mappedHost = matcher.regex.ReplaceAllString(caID, mappedHost) } //Get the certAuthorityMatchConfig from mapped host caConfig, ok := configEntity.CertificateAuthorities[strings.ToLower(mappedHost)] if !ok { return nil, false } if matcher.matchConfig.URLSubstitutionExp != "" { caConfig.URL = matcher.matchConfig.URLSubstitutionExp //check for regex replace '$' if strings.Contains(caConfig.URL, "$") { caConfig.URL = matcher.regex.ReplaceAllString(caID, caConfig.URL) } } if caConfig.GRPCOptions == nil { caConfig.GRPCOptions = make(map[string]interface{}) } //SSLTargetOverrideURLSubstitutionExp if found use from entity matcher otherwise use from mapped host if matcher.matchConfig.SSLTargetOverrideURLSubstitutionExp != "" { hostOverride := matcher.matchConfig.SSLTargetOverrideURLSubstitutionExp //check for regex replace '$' if strings.Contains(hostOverride, "$") { hostOverride = matcher.regex.ReplaceAllString(caID, hostOverride) } caConfig.GRPCOptions["ssl-target-name-override"] = hostOverride } return &caConfig, true }
}
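The matcher logic in tryMatchingCAConfig/findMatchingCAConfig above reduces to a regexp substitution guarded by a '$' check. A self-contained sketch of that step, with hypothetical pattern and URL values (not taken from any real SDK config):

package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	// Hypothetical entityMatchers entry: Pattern and URLSubstitutionExp.
	regex := regexp.MustCompile(`(\w+)\.org1\.example\.com`)
	url := "https://${1}.org1.example.local:7054"

	caID := "ca.org1.example.com"
	if strings.Contains(url, "$") { // same guard as findMatchingCAConfig
		url = regex.ReplaceAllString(caID, url)
	}
	fmt.Println(url) // https://ca.org1.example.local:7054
}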
xQueryGrammar.js
/* parser generated by jison 0.4.18 */ /* Returns a Parser object of the following structure: Parser: { yy: {} } Parser.prototype: { yy: {}, trace: function(), symbols_: {associative list: name ==> number}, terminals_: {associative list: number ==> name}, productions_: [...], performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate, $$, _$), table: [...], defaultActions: {...}, parseError: function(str, hash), parse: function(input), lexer: { EOF: 1, parseError: function(str, hash), setInput: function(input), input: function(), unput: function(str), more: function(), less: function(n), pastInput: function(), upcomingInput: function(), showPosition: function(), test_match: function(regex_match_array, rule_index), next: function(), lex: function(), begin: function(condition), popState: function(), _currentRules: function(), topState: function(), pushState: function(condition), options: { ranges: boolean (optional: true ==> token location info will include a .range[] member) flex: boolean (optional: true ==> flex-like lexing behaviour where the rules are tested exhaustively to find the longest match) backtrack_lexer: boolean (optional: true ==> lexer regexes are tested in order and for each matching regex the action code is invoked; the lexer terminates the scan when a token is returned by the action code) }, performAction: function(yy, yy_, $avoiding_name_collisions, YY_START), rules: [...], conditions: {associative list: name ==> set}, } } token location info (@$, _$, etc.): { first_line: n, last_line: n, first_column: n, last_column: n, range: [start_number, end_number] (where the numbers are indexes into the input string, regular zero-based) } the parseError function receives a 'hash' object with these members for lexer and parser errors: { text: (matched text) token: (the produced terminal token, if any) line: (yylineno) } while parser (grammar) errors will also provide these members, i.e. 
parser errors deliver a superset of attributes: { loc: (yylloc) expected: (string describing the set of expected tokens) recoverable: (boolean: TRUE when the parser has a error recovery rule available for this particular error) } */ var xQueryGrammar = (function(){ var o=function(k,v,o,l){for(o=o||{},l=k.length;l--;o[k[l]]=v);return o},$V0=[1,5],$V1=[1,6],$V2=[1,15],$V3=[1,16],$V4=[1,17],$V5=[1,13],$V6=[1,14],$V7=[5,6],$V8=[5,6,65],$V9=[5,6,36],$Va=[1,33],$Vb=[1,23],$Vc=[1,24],$Vd=[1,31],$Ve=[1,25],$Vf=[1,26],$Vg=[1,27],$Vh=[1,28],$Vi=[1,29],$Vj=[1,30],$Vk=[1,32],$Vl=[1,46],$Vm=[1,47],$Vn=[1,50],$Vo=[1,44],$Vp=[1,45],$Vq=[1,48],$Vr=[5,6,27,29,33,36,65],$Vs=[1,53],$Vt=[1,54],$Vu=[5,6,27,29,33,36,41,45,46,47,48,49,50,51,52,53,54,55,56,57,60,62,65,80],$Vv=[1,57],$Vw=[1,70],$Vx=[1,71],$Vy=[1,72],$Vz=[1,73],$VA=[1,74],$VB=[1,75],$VC=[1,76],$VD=[1,77],$VE=[1,78],$VF=[1,79],$VG=[1,80],$VH=[1,81],$VI=[1,82],$VJ=[5,6,29,33,36,41,44,45,46,47,48,49,50,51,52,53,54,55,56,57],$VK=[5,6,27,29,33,36,41,45,46,47,48,49,50,51,52,53,54,55,56,57,60,62,65,78,80],$VL=[1,92],$VM=[1,93],$VN=[1,91],$VO=[1,94],$VP=[1,98],$VQ=[1,95],$VR=[1,96],$VS=[6,36],$VT=[5,6,21,22,27,29,33,36,41,44,45,46,47,48,49,50,51,52,53,54,55,56,57],$VU=[1,132],$VV=[1,133],$VW=[1,134],$VX=[1,135],$VY=[1,136],$VZ=[1,137],$V_=[1,138],$V$=[1,139],$V01=[1,140],$V11=[1,141],$V21=[1,142],$V31=[1,143],$V41=[1,144],$V51=[41,45,46,47,48,49,50,51,52,53,54,55,56,57,80],$V61=[5,6,29,33,36,41,44,45,46,50,51,52,53,54,55,56,57],$V71=[5,6,29,33,36,41,44,45,46,47,48,50,51,52,53,54,55,56,57],$V81=[5,6,29,33,36,41,44,50,51,56,57],$V91=[5,6,29,33,36,41,44,50,51,52,53,54,55,56,57],$Va1=[1,165],$Vb1=[1,166],$Vc1=[1,167],$Vd1=[1,189],$Ve1=[1,188],$Vf1=[27,29,33],$Vg1=[1,194],$Vh1=[1,195],$Vi1=[5,6,21,22,27,29,33,36,41,44,45,46,47,48,49,50,51,52,53,54,55,56,57,60,62],$Vj1=[41,45,46,50,51,52,53,54,55,56,57,80],$Vk1=[41,45,46,47,48,50,51,52,53,54,55,56,57,80],$Vl1=[41,50,51,56,57,80],$Vm1=[41,50,51,52,53,54,55,56,57,80]; var parser = {trace: function trace () { }, yy: {}, symbols_: {"error":2,"INIT":3,"LQUERYS":4,"EOF":5,",":6,"QUERY":7,"cadena":8,"scadena":9,"MULTIPATH":10,"XQUERY":11,"FOR":12,"LET":13,"RETURN":14,"for":15,"$":16,"id":17,"in":18,"TIPOPATH":19,"ORDERBY":20,"ascending":21,"descending":22,"WHERE":23,"at":24,"XQUERYPATH":25,"PATH":26,"where":27,"EXPXQUERY":28,"order":29,"by":30,"let":31,":=":32,"return":33,"{":34,"LEXPSRET":35,"}":36,"EXPRET":37,"IF":38,"if":39,"(":40,")":41,"then":42,"ElSEst":43,"else":44,"+":45,"-":46,"*":47,"div":48,"mod":49,"=":50,"!=":51,"<":52,"<=":53,">":54,">=":55,"and":56,"or":57,"VALOREXPXQUERY":58,"number":59,"/":60,"LACCESOSXQUERY":61,"//":62,"ACCESOXQUERY":63,"@":64,"|":65,"LACCESOS":66,"ACCESO":67,".":68,"..":69,"text":70,"node":71,"child":72,"::":73,"descendant":74,"PREDICADOS":75,"attribute":76,"PREDI":77,"[":78,"EXP":79,"]":80,"VALOR":81,"position":82,"last":83,"$accept":0,"$end":1}, terminals_: {2:"error",5:"EOF",6:",",8:"cadena",9:"scadena",15:"for",16:"$",17:"id",18:"in",21:"ascending",22:"descending",24:"at",27:"where",29:"order",30:"by",31:"let",32:":=",33:"return",34:"{",36:"}",39:"if",40:"(",41:")",42:"then",44:"else",45:"+",46:"-",47:"*",48:"div",49:"mod",50:"=",51:"!=",52:"<",53:"<=",54:">",55:">=",56:"and",57:"or",59:"number",60:"/",62:"//",64:"@",65:"|",68:".",69:"..",70:"text",71:"node",72:"child",73:"::",74:"descendant",76:"attribute",78:"[",80:"]",82:"position",83:"last"}, productions_: 
[0,[3,2],[3,1],[4,3],[4,1],[7,1],[7,1],[7,1],[7,1],[11,1],[11,1],[11,1],[12,6],[12,7],[12,8],[12,8],[12,7],[12,8],[12,9],[12,9],[12,9],[12,10],[12,11],[12,11],[12,10],[12,11],[12,12],[12,12],[19,1],[19,1],[23,2],[20,3],[13,5],[13,5],[13,6],[13,6],[14,4],[14,2],[35,3],[35,1],[37,1],[37,1],[37,1],[37,1],[38,7],[43,2],[43,2],[43,0],[28,3],[28,3],[28,3],[28,3],[28,3],[28,3],[28,3],[28,3],[28,3],[28,3],[28,3],[28,3],[28,3],[28,1],[58,3],[58,1],[58,1],[58,1],[58,1],[25,4],[25,4],[25,2],[61,3],[61,3],[61,1],[63,1],[63,1],[63,2],[63,2],[10,3],[10,1],[26,2],[26,2],[66,3],[66,3],[66,1],[67,1],[67,1],[67,1],[67,1],[67,3],[67,3],[67,3],[67,3],[67,3],[67,3],[67,2],[67,2],[67,4],[67,4],[67,4],[67,4],[67,2],[67,2],[67,3],[67,3],[67,3],[67,3],[67,4],[67,4],[67,1],[75,2],[75,1],[77,3],[79,3],[79,3],[79,3],[79,3],[79,3],[79,3],[79,3],[79,3],[79,3],[79,3],[79,3],[79,3],[79,3],[79,1],[81,3],[81,1],[81,1],[81,1],[81,3],[81,3],[81,1],[81,2]], performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate /* action[1] */, $$ /* vstack */, _$ /* lstack */) { /* this == yyval */ var $0 = $$.length - 1; switch (yystate) { case 1: return $$[$0-1]; break; case 2: return $$[$0]; break; case 3: case 38: case 77: $$[$0-2].push($$[$0]); this.$ = $$[$0-2]; break; case 4: case 39: case 72: case 78: case 83: case 110: this.$ = [$$[$0]]; break; case 5: case 6: case 63: case 64: case 127: case 128: this.$ = new Primitivo(_$[$0].first_line, _$[$0].first_column, $$[$0], tipoPrimitivo.STRING); break; case 7: this.$ = new MultiXpaths(_$[$0].first_line, _$[$0].first_column, $$[$0]); break; case 8: case 9: case 10: case 11: case 28: case 29: case 30: case 31: case 40: case 41: case 42: case 43: case 45: case 46: case 61: case 125: this.$ = $$[$0]; break; case 12: this.$ = new For(_$[$0-5].first_line, _$[$0-5].first_column, $$[$0-3], "", $$[$0-1], null, null, "", $$[$0]); break; case 13: this.$ = new For(_$[$0-6].first_line, _$[$0-6].first_column, $$[$0-4], "", $$[$0-2], null, $$[$0-1], "", $$[$0]); break; case 14: case 15: this.$ = new For(_$[$0-7].first_line, _$[$0-7].first_column, $$[$0-5], "", $$[$0-3], null, $$[$0-2], $$[$0-1], $$[$0]); break; case 16: this.$ = new For(_$[$0-6].first_line, _$[$0-6].first_column, $$[$0-4], "", $$[$0-2], $$[$0-1], null, "", $$[$0]); break; case 17: this.$ = new For(_$[$0-7].first_line, _$[$0-7].first_column, $$[$0-5], "", $$[$0-3], $$[$0-2], $$[$0-1], "", $$[$0]); break; case 18: case 19: this.$ = new For(_$[$0-8].first_line, _$[$0-8].first_column, $$[$0-6], "", $$[$0-4], $$[$0-3], $$[$0-2], $$[$0-1], $$[$0]); break; case 20: this.$ = new For(_$[$0-8].first_line, _$[$0-8].first_column, $$[$0-6], $$[$0-3], $$[$0-1], null, null, "", $$[$0]); break; case 21: this.$ = new For(_$[$0-9].first_line, _$[$0-9].first_column, $$[$0-7], $$[$0-4], $$[$0-2], null, $$[$0-1], "", $$[$0]); break; case 22: case 23: this.$ = new For(_$[$0-10].first_line, _$[$0-10].first_column, $$[$0-8], $$[$0-5], $$[$0-3], null, $$[$0-2], $$[$0-1], $$[$0]); break; case 24: this.$ = new For(_$[$0-9].first_line, _$[$0-9].first_column, $$[$0-7], $$[$0-4], $$[$0-2], $$[$0-1], null, "", $$[$0]); break; case 25: this.$ = new For(_$[$0-10].first_line, _$[$0-10].first_column, $$[$0-8], $$[$0-5], $$[$0-3], $$[$0-2], $$[$0-1], "", $$[$0]); break; case 26: case 27: this.$ = new For(_$[$0-11].first_line, _$[$0-11].first_column, $$[$0-9], $$[$0-6], $$[$0-4], $$[$0-3], $$[$0-2], $$[$0-1], $$[$0]); break; case 32: case 33: this.$ = new Let(_$[$0-4].first_line, _$[$0-4].first_column, $$[$0-2], $$[$0], new Return (_$[$0-4].first_line, 
_$[$0-4].first_column, [])); break; case 34: case 35: this.$ = new Let(_$[$0-5].first_line, _$[$0-5].first_column, $$[$0-3], $$[$0-1], $$[$0]); break; case 36: this.$ = new Return (_$[$0-3].first_line, _$[$0-3].first_column, $$[$0-1]); break; case 37: this.$ = new Return (_$[$0-1].first_line, _$[$0-1].first_column, [$$[$0]]); break; case 44: this.$ = new If(_$[$0-6].first_line, _$[$0-6].first_column, $$[$0-4], $$[$0-1], $$[$0]); break; case 47: this.$ = null; break; case 48: case 112: this.$ = new Aritmetico(_$[$0-1].first_line, _$[$0-1].first_column, $$[$0-2], $$[$0], operacionAritmetica.SUMA, $$[$0-1]); break; case 49: case 113: this.$ = new Aritmetico(_$[$0-1].first_line, _$[$0-1].first_column, $$[$0-2], $$[$0], operacionAritmetica.RESTA, $$[$0-1]); break; case 50: case 114: this.$ = new Aritmetico(_$[$0-1].first_line, _$[$0-1].first_column, $$[$0-2], $$[$0], operacionAritmetica.MULT, $$[$0-1]); break; case 51: case 115: this.$ = new Aritmetico(_$[$0-1].first_line, _$[$0-1].first_column, $$[$0-2], $$[$0], operacionAritmetica.DIV, $$[$0-1]); break; case 52: case 116: this.$ = new Aritmetico(_$[$0-1].first_line, _$[$0-1].first_column, $$[$0-2], $$[$0], operacionAritmetica.MOD, $$[$0-1]); break; case 53: case 117: this.$ = new Relacional(_$[$0-1].first_line, _$[$0-1].first_column, $$[$0-2], $$[$0], operacionRelacional.IGUAL, $$[$0-1]); break; case 54: case 118: this.$ = new Relacional(_$[$0-1].first_line, _$[$0-1].first_column, $$[$0-2], $$[$0], operacionRelacional.DIFERENCIACION, $$[$0-1]); break; case 55: case 119: this.$ = new Relacional(_$[$0-1].first_line, _$[$0-1].first_column, $$[$0-2], $$[$0], operacionRelacional.MENOR, $$[$0-1]); break; case 56: case 120: this.$ = new Relacional(_$[$0-1].first_line, _$[$0-1].first_column, $$[$0-2], $$[$0], operacionRelacional.MENORIGUAL, $$[$0-1]); break; case 57: case 121: this.$ = new Relacional(_$[$0-1].first_line, _$[$0-1].first_column, $$[$0-2], $$[$0], operacionRelacional.MAYOR, $$[$0-1]); break; case 58: case 122: this.$ = new Relacional(_$[$0-1].first_line, _$[$0-1].first_column, $$[$0-2], $$[$0], operacionRelacional.MAYORIGUAL, $$[$0-1]); break; case 59: case 123: this.$ = new Logica(_$[$0-1].first_line, _$[$0-1].first_column, $$[$0-2], $$[$0], operacionLogica.AND, $$[$0-1]); break; case 60: case 124: this.$ = new Logica(_$[$0-1].first_line, _$[$0-1].first_column, $$[$0-2], $$[$0], operacionLogica.OR, $$[$0-1]); break; case 62: case 111: case 126: this.$ = $$[$0-1]; break; case 65: case 129: this.$ = new Primitivo(_$[$0].first_line, _$[$0].first_column, $$[$0], tipoPrimitivo.NUMBER); break; case 66: this.$ = $$[$0] break; case 67: $$[$0][0].tipoQuery = 'relativa'; this.$ = new XqueryPath(_$[$0-2].first_line, _$[$0-2].first_column, $$[$0-2], new Path(_$[$0-2].first_line, _$[$0-2].first_column, $$[$0], 'sub')); break; case 68: $$[$0][0].tipoQuery = 'absoluta'; this.$ = new XqueryPath(_$[$0-2].first_line, _$[$0-2].first_column, $$[$0-2], new Path(_$[$0-2].first_line, _$[$0-2].first_column, $$[$0], 'sub')); break; case 69: this.$ = new XqueryPath(_$[$0].first_line, _$[$0].first_column, $$[$0], new Path(_$[$0].first_line, _$[$0].first_column, [], 'sub')); break; case 70: case 71: $$[$0].tipoQuery = 'relativa'; $$[$0-2].push($$[$0]); this.$ = $$[$0-2]; break; case 73: case 84: this.$ = new Acceso(_$[$0].first_line, _$[$0].first_column, $$[$0], 'nodo', []); break; case 74: case 85: this.$ = new Acceso(_$[$0].first_line, _$[$0].first_column, $$[$0], 'todosNodos', []); break; case 75: case 100: this.$ = new Acceso(_$[$0].first_line, 
_$[$0].first_column, $$[$0], 'atributo', []); break; case 76: case 101: this.$ = new Acceso(_$[$0].first_line, _$[$0].first_column, $$[$0], 'todosAtributos', []); break; case 79: if($$[$0][0].tipoQuery === undefined){$$[$0][0].tipoQuery = 'relativa';} this.$ = new Path(_$[$0-1].first_line, _$[$0-1].first_column, $$[$0]); break; case 80: if($$[$0][0].tipoQuery === undefined){$$[$0][0].tipoQuery = 'absoluta';} this.$ = new Path(_$[$0-1].first_line, _$[$0-1].first_column, $$[$0]); break; case 81: if($$[$0].tipoQuery === undefined){$$[$0].tipoQuery = 'relativa'} $$[$0-2].push($$[$0]); this.$ = $$[$0-2]; break; case 82: if($$[$0].tipoQuery === undefined){$$[$0].tipoQuery = 'absoluta'} $$[$0-2].push($$[$0]); this.$ = $$[$0-2]; break; case 86: this.$ = new Acceso(_$[$0].first_line, _$[$0].first_column, $$[$0], 'actual', []); break; case 87: this.$ = new Acceso(_$[$0].first_line, _$[$0].first_column, $$[$0], 'padre', []); break; case 88: this.$ = new Acceso(_$[$0-2].first_line, _$[$0-2].first_column, $$[$0-2], 'texto', []); break; case 89: this.$ = new Acceso(_$[$0-2].first_line, _$[$0-2].first_column, $$[$0-2], 'todosNodos', []); break; case 90: this.$ = new Acceso(_$[$0-2].first_line, _$[$0-2].first_column, $$[$0], 'nodo', []); break; case 91: this.$ = new Acceso(_$[$0-2].first_line, _$[$0-2].first_column, $$[$0], 'todosNodos', []); break; case 92: this.$ = new Acceso(_$[$0-2].first_line, _$[$0-2].first_column, $$[$0], 'nodo', [], 'absoluta'); break; case 93: this.$ = new Acceso(_$[$0-2].first_line, _$[$0-2].first_column, $$[$0], 'todosNodos', [], 'absoluta'); break; case 94: this.$ = new Acceso(_$[$0-1].first_line, _$[$0-1].first_column, $$[$0-1], 'nodo', $$[$0]); break; case 95: this.$ = new Acceso(_$[$0-1].first_line, _$[$0-1].first_column, $$[$0-1], 'todosNodos', $$[$0]); break; case 96: this.$ = new Acceso(_$[$0-3].first_line, _$[$0-3].first_column, $$[$0-1], 'nodo', $$[$0]); break; case 97: this.$ = new Acceso(_$[$0-3].first_line, _$[$0-3].first_column, $$[$0-1], 'todosNodos', $$[$0]); break; case 98: this.$ = new Acceso(_$[$0-3].first_line, _$[$0-3].first_column, $$[$0-1], 'nodo', $$[$0], 'absoluta'); break; case 99: this.$ = new Acceso(_$[$0-3].first_line, _$[$0-3].first_column, $$[$0-1], 'todosNodos', $$[$0], 'absoluta'); break; case 102: this.$ = new Acceso(_$[$0-1].first_line, _$[$0-1].first_column, $$[$0], 'atributo', []); break; case 103: this.$ = new Acceso(_$[$0-1].first_line, _$[$0-1].first_column, $$[$0], 'todosAtributos', []); break; case 104: this.$ = new Acceso(_$[$0-1].first_line, _$[$0-1].first_column, $$[$0-1], 'atributo', $$[$0]); break; case 105: this.$ = new Acceso(_$[$0-1].first_line, _$[$0-1].first_column, $$[$0-1], 'todosAtributos', $$[$0]); break; case 106: this.$ = new Acceso(_$[$0-2].first_line, _$[$0-2].first_column, $$[$0-1], 'atributo', $$[$0]); break; case 107: this.$ = new Acceso(_$[$0-2].first_line, _$[$0-2].first_column, $$[$0-1], 'todosAtributos', $$[$0]); break; case 108: listaErrores.push(new ClaseError('Sintactico','Se esperaba la definicion de una etiqueta',_$[$0].first_line, _$[$0].first_column)) break; case 109: $$[$0-1].push($$[$0]); this.$ = $$[$0-1]; break; case 130: case 131: this.$ = new Primitivo(_$[$0-2].first_line, _$[$0-2].first_column, $$[$0-2]); break; case 132: if($$[$0][0].tipoQuery === undefined){$$[$0][0].tipoQuery = 'relativa';} this.$ = new Path(_$[$0].first_line, _$[$0].first_column, $$[$0], 'sub'); break; case 133: if($$[$0][0].tipoQuery === undefined){$$[$0][0].tipoQuery ='relativa';} this.$ = new Path(_$[$0-1].first_line, 
_$[$0-1].first_column, $$[$0], 'sub'); break; } }, table: [{3:1,4:2,5:[1,3],7:4,8:$V0,9:$V1,10:7,11:8,12:10,13:11,14:12,15:$V2,26:9,31:$V3,33:$V4,60:$V5,62:$V6},{1:[3]},{5:[1,18],6:[1,19]},{1:[2,2]},o($V7,[2,4]),o($V7,[2,5]),o($V7,[2,6]),o($V7,[2,7],{65:[1,20]}),o($V7,[2,8]),o($V8,[2,78]),o($V9,[2,9]),o($V9,[2,10]),o($V9,[2,11]),{2:$Va,17:$Vb,47:$Vc,64:$Vd,66:21,67:22,68:$Ve,69:$Vf,70:$Vg,71:$Vh,72:$Vi,74:$Vj,76:$Vk},{2:$Va,17:$Vb,47:$Vc,64:$Vd,66:34,67:22,68:$Ve,69:$Vf,70:$Vg,71:$Vh,72:$Vi,74:$Vj,76:$Vk},{16:[1,35]},{16:[1,36]},{8:$Vl,9:$Vm,11:39,12:10,13:11,14:12,15:$V2,16:$Vn,25:49,26:41,28:40,31:$V3,33:$V4,34:[1,37],37:38,38:42,39:$Vo,40:$Vp,58:43,59:$Vq,60:$V5,62:$V6},{1:[2,1]},{7:51,8:$V0,9:$V1,10:7,11:8,12:10,13:11,14:12,15:$V2,26:9,31:$V3,33:$V4,60:$V5,62:$V6},{26:52,60:$V5,62:$V6},o($Vr,[2,79],{60:$Vs,62:$Vt}),o($Vu,[2,83]),o($Vu,[2,84],{75:55,77:56,78:$Vv}),o($Vu,[2,85],{77:56,75:58,78:$Vv}),o($Vu,[2,86]),o($Vu,[2,87]),{40:[1,59]},{40:[1,60]},{73:[1,61]},{73:[1,62]},{17:[1,63],47:[1,64]},{73:[1,65]},o($Vu,[2,108]),o($Vr,[2,80],{60:$Vs,62:$Vt}),{17:[1,66]},{17:[1,67]},{8:$Vl,9:$Vm,11:39,12:10,13:11,14:12,15:$V2,16:$Vn,25:49,26:41,28:40,31:$V3,33:$V4,35:68,37:69,38:42,39:$Vo,40:$Vp,58:43,59:$Vq,60:$V5,62:$V6},o($V9,[2,37]),o($V9,[2,40]),o($V9,[2,41],{45:$Vw,46:$Vx,47:$Vy,48:$Vz,49:$VA,50:$VB,51:$VC,52:$VD,53:$VE,54:$VF,55:$VG,56:$VH,57:$VI}),o($V9,[2,42]),o($V9,[2,43]),o($VJ,[2,61]),{40:[1,83]},{8:$Vl,9:$Vm,16:$Vn,25:49,28:84,40:$Vp,58:43,59:$Vq},o($VJ,[2,63]),o($VJ,[2,64]),o($VJ,[2,65]),o($VJ,[2,66]),{17:[1,85]},o($V7,[2,3]),o($V8,[2,77]),{2:$Va,17:$Vb,47:$Vc,64:$Vd,67:86,68:$Ve,69:$Vf,70:$Vg,71:$Vh,72:$Vi,74:$Vj,76:$Vk},{2:$Va,17:$Vb,47:$Vc,64:$Vd,67:87,68:$Ve,69:$Vf,70:$Vg,71:$Vh,72:$Vi,74:$Vj,76:$Vk},o($Vu,[2,94],{77:88,78:$Vv}),o($VK,[2,110]),{2:$Va,8:$VL,9:$VM,17:$Vb,40:$VN,47:$Vc,59:$VO,62:$VP,64:$Vd,66:97,67:22,68:$Ve,69:$Vf,70:$Vg,71:$Vh,72:$Vi,74:$Vj,76:$Vk,79:89,81:90,82:$VQ,83:$VR},o($Vu,[2,95],{77:88,78:$Vv}),{41:[1,99]},{41:[1,100]},{17:[1,101],47:[1,102]},{17:[1,103],47:[1,104]},o($Vu,[2,100],{77:56,75:105,78:$Vv}),o($Vu,[2,101],{77:56,75:106,78:$Vv}),{17:[1,107],47:[1,108]},{18:[1,109],24:[1,110]},{32:[1,111]},{6:[1,113],36:[1,112]},o($VS,[2,39]),{8:$Vl,9:$Vm,16:$Vn,25:49,28:114,40:$Vp,58:43,59:$Vq},{8:$Vl,9:$Vm,16:$Vn,25:49,28:115,40:$Vp,58:43,59:$Vq},{8:$Vl,9:$Vm,16:$Vn,25:49,28:116,40:$Vp,58:43,59:$Vq},{8:$Vl,9:$Vm,16:$Vn,25:49,28:117,40:$Vp,58:43,59:$Vq},{8:$Vl,9:$Vm,16:$Vn,25:49,28:118,40:$Vp,58:43,59:$Vq},{8:$Vl,9:$Vm,16:$Vn,25:49,28:119,40:$Vp,58:43,59:$Vq},{8:$Vl,9:$Vm,16:$Vn,25:49,28:120,40:$Vp,58:43,59:$Vq},{8:$Vl,9:$Vm,16:$Vn,25:49,28:121,40:$Vp,58:43,59:$Vq},{8:$Vl,9:$Vm,16:$Vn,25:49,28:122,40:$Vp,58:43,59:$Vq},{8:$Vl,9:$Vm,16:$Vn,25:49,28:123,40:$Vp,58:43,59:$Vq},{8:$Vl,9:$Vm,16:$Vn,25:49,28:124,40:$Vp,58:43,59:$Vq},{8:$Vl,9:$Vm,16:$Vn,25:49,28:125,40:$Vp,58:43,59:$Vq},{8:$Vl,9:$Vm,16:$Vn,25:49,28:126,40:$Vp,58:43,59:$Vq},{8:$Vl,9:$Vm,16:$Vn,25:49,28:127,40:$Vp,58:43,59:$Vq},{41:[1,128],45:$Vw,46:$Vx,47:$Vy,48:$Vz,49:$VA,50:$VB,51:$VC,52:$VD,53:$VE,54:$VF,55:$VG,56:$VH,57:$VI},o($VT,[2,69],{60:[1,129],62:[1,130]}),o($Vu,[2,81]),o($Vu,[2,82]),o($VK,[2,109]),{45:$VU,46:$VV,47:$VW,48:$VX,49:$VY,50:$VZ,51:$V_,52:$V$,53:$V01,54:$V11,55:$V21,56:$V31,57:$V41,80:[1,131]},o($V51,[2,125]),{2:$Va,8:$VL,9:$VM,17:$Vb,40:$VN,47:$Vc,59:$VO,62:$VP,64:$Vd,66:97,67:22,68:$Ve,69:$Vf,70:$Vg,71:$Vh,72:$Vi,74:$Vj,76:$Vk,79:145,81:90,82:$VQ,83:$VR},o($V51,[2,127]),o($V51,[2,128]),o($V51,[2,129]),{40:[1,146]},{40:[1,147]},o($V51,[2,132],{60:$Vs,62:$Vt}),{2:$Va,17:$Vb,47:$Vc,64:
$Vd,66:148,67:22,68:$Ve,69:$Vf,70:$Vg,71:$Vh,72:$Vi,74:$Vj,76:$Vk},o($Vu,[2,88]),o($Vu,[2,89]),o($Vu,[2,90],{77:56,75:149,78:$Vv}),o($Vu,[2,91],{77:56,75:150,78:$Vv}),o($Vu,[2,92],{77:56,75:151,78:$Vv}),o($Vu,[2,93],{77:56,75:152,78:$Vv}),o($Vu,[2,104],{77:88,78:$Vv}),o($Vu,[2,105],{77:88,78:$Vv}),o($Vu,[2,102],{77:56,75:153,78:$Vv}),o($Vu,[2,103],{77:56,75:154,78:$Vv}),{16:$Vn,19:155,25:156,26:157,60:$V5,62:$V6},{16:[1,158]},{8:$Vl,9:$Vm,16:$Vn,25:49,26:160,28:159,40:$Vp,58:43,59:$Vq,60:$V5,62:$V6},o($V9,[2,36]),{8:$Vl,9:$Vm,11:39,12:10,13:11,14:12,15:$V2,16:$Vn,25:49,26:41,28:40,31:$V3,33:$V4,37:161,38:42,39:$Vo,40:$Vp,58:43,59:$Vq,60:$V5,62:$V6},o($V61,[2,48],{47:$Vy,48:$Vz,49:$VA}),o($V61,[2,49],{47:$Vy,48:$Vz,49:$VA}),o($V71,[2,50],{49:$VA}),o($V71,[2,51],{49:$VA}),o($VJ,[2,52]),o($V81,[2,53],{45:$Vw,46:$Vx,47:$Vy,48:$Vz,49:$VA,52:$VD,53:$VE,54:$VF,55:$VG}),o($V81,[2,54],{45:$Vw,46:$Vx,47:$Vy,48:$Vz,49:$VA,52:$VD,53:$VE,54:$VF,55:$VG}),o($V91,[2,55],{45:$Vw,46:$Vx,47:$Vy,48:$Vz,49:$VA}),o($V91,[2,56],{45:$Vw,46:$Vx,47:$Vy,48:$Vz,49:$VA}),o($V91,[2,57],{45:$Vw,46:$Vx,47:$Vy,48:$Vz,49:$VA}),o($V91,[2,58],{45:$Vw,46:$Vx,47:$Vy,48:$Vz,49:$VA}),o([5,6,29,33,36,41,44,56,57],[2,59],{45:$Vw,46:$Vx,47:$Vy,48:$Vz,49:$VA,50:$VB,51:$VC,52:$VD,53:$VE,54:$VF,55:$VG}),o([5,6,29,33,36,41,44,57],[2,60],{45:$Vw,46:$Vx,47:$Vy,48:$Vz,49:$VA,50:$VB,51:$VC,52:$VD,53:$VE,54:$VF,55:$VG,56:$VH}),{41:[1,162],45:$Vw,46:$Vx,47:$Vy,48:$Vz,49:$VA,50:$VB,51:$VC,52:$VD,53:$VE,54:$VF,55:$VG,56:$VH,57:$VI},o($VJ,[2,62]),{17:$Va1,47:$Vb1,61:163,63:164,64:$Vc1},{17:$Va1,47:$Vb1,61:168,63:164,64:$Vc1},o($VK,[2,111]),{2:$Va,8:$VL,9:$VM,17:$Vb,40:$VN,47:$Vc,59:$VO,62:$VP,64:$Vd,66:97,67:22,68:$Ve,69:$Vf,70:$Vg,71:$Vh,72:$Vi,74:$Vj,76:$Vk,79:169,81:90,82:$VQ,83:$VR},{2:$Va,8:$VL,9:$VM,17:$Vb,40:$VN,47:$Vc,59:$VO,62:$VP,64:$Vd,66:97,67:22,68:$Ve,69:$Vf,70:$Vg,71:$Vh,72:$Vi,74:$Vj,76:$Vk,79:170,81:90,82:$VQ,83:$VR},{2:$Va,8:$VL,9:$VM,17:$Vb,40:$VN,47:$Vc,59:$VO,62:$VP,64:$Vd,66:97,67:22,68:$Ve,69:$Vf,70:$Vg,71:$Vh,72:$Vi,74:$Vj,76:$Vk,79:171,81:90,82:$VQ,83:$VR},{2:$Va,8:$VL,9:$VM,17:$Vb,40:$VN,47:$Vc,59:$VO,62:$VP,64:$Vd,66:97,67:22,68:$Ve,69:$Vf,70:$Vg,71:$Vh,72:$Vi,74:$Vj,76:$Vk,79:172,81:90,82:$VQ,83:$VR},{2:$Va,8:$VL,9:$VM,17:$Vb,40:$VN,47:$Vc,59:$VO,62:$VP,64:$Vd,66:97,67:22,68:$Ve,69:$Vf,70:$Vg,71:$Vh,72:$Vi,74:$Vj,76:$Vk,79:173,81:90,82:$VQ,83:$VR},{2:$Va,8:$VL,9:$VM,17:$Vb,40:$VN,47:$Vc,59:$VO,62:$VP,64:$Vd,66:97,67:22,68:$Ve,69:$Vf,70:$Vg,71:$Vh,72:$Vi,74:$Vj,76:$Vk,79:174,81:90,82:$VQ,83:$VR},{2:$Va,8:$VL,9:$VM,17:$Vb,40:$VN,47:$Vc,59:$VO,62:$VP,64:$Vd,66:97,67:22,68:$Ve,69:$Vf,70:$Vg,71:$Vh,72:$Vi,74:$Vj,76:$Vk,79:175,81:90,82:$VQ,83:$VR},{2:$Va,8:$VL,9:$VM,17:$Vb,40:$VN,47:$Vc,59:$VO,62:$VP,64:$Vd,66:97,67:22,68:$Ve,69:$Vf,70:$Vg,71:$Vh,72:$Vi,74:$Vj,76:$Vk,79:176,81:90,82:$VQ,83:$VR},{2:$Va,8:$VL,9:$VM,17:$Vb,40:$VN,47:$Vc,59:$VO,62:$VP,64:$Vd,66:97,67:22,68:$Ve,69:$Vf,70:$Vg,71:$Vh,72:$Vi,74:$Vj,76:$Vk,79:177,81:90,82:$VQ,83:$VR},{2:$Va,8:$VL,9:$VM,17:$Vb,40:$VN,47:$Vc,59:$VO,62:$VP,64:$Vd,66:97,67:22,68:$Ve,69:$Vf,70:$Vg,71:$Vh,72:$Vi,74:$Vj,76:$Vk,79:178,81:90,82:$VQ,83:$VR},{2:$Va,8:$VL,9:$VM,17:$Vb,40:$VN,47:$Vc,59:$VO,62:$VP,64:$Vd,66:97,67:22,68:$Ve,69:$Vf,70:$Vg,71:$Vh,72:$Vi,74:$Vj,76:$Vk,79:179,81:90,82:$VQ,83:$VR},{2:$Va,8:$VL,9:$VM,17:$Vb,40:$VN,47:$Vc,59:$VO,62:$VP,64:$Vd,66:97,67:22,68:$Ve,69:$Vf,70:$Vg,71:$Vh,72:$Vi,74:$Vj,76:$Vk,79:180,81:90,82:$VQ,83:$VR},{2:$Va,8:$VL,9:$VM,17:$Vb,40:$VN,47:$Vc,59:$VO,62:$VP,64:$Vd,66:97,67:22,68:$Ve,69:$Vf,70:$Vg,71:$Vh,72:$Vi,74:$Vj,76:$Vk,79:181,81:90,82:$VQ,8
3:$VR},{41:[1,182],45:$VU,46:$VV,47:$VW,48:$VX,49:$VY,50:$VZ,51:$V_,52:$V$,53:$V01,54:$V11,55:$V21,56:$V31,57:$V41},{41:[1,183]},{41:[1,184]},o($V51,[2,133],{60:$Vs,62:$Vt}),o($Vu,[2,96],{77:88,78:$Vv}),o($Vu,[2,97],{77:88,78:$Vv}),o($Vu,[2,98],{77:88,78:$Vv}),o($Vu,[2,99],{77:88,78:$Vv}),o($Vu,[2,106],{77:88,78:$Vv}),o($Vu,[2,107],{77:88,78:$Vv}),{14:185,20:186,23:187,27:$Vd1,29:$Ve1,33:$V4},o($Vf1,[2,28]),o($Vf1,[2,29]),{17:[1,190]},o($V9,[2,32],{14:191,33:$V4,45:$Vw,46:$Vx,47:$Vy,48:$Vz,49:$VA,50:$VB,51:$VC,52:$VD,53:$VE,54:$VF,55:$VG,56:$VH,57:$VI}),o($V9,[2,33],{14:192,33:$V4}),o($VS,[2,38]),{42:[1,193]},o($VT,[2,67],{60:$Vg1,62:$Vh1}),o($Vi1,[2,72]),o($Vi1,[2,73]),o($Vi1,[2,74]),{17:[1,196],47:[1,197]},o($VT,[2,68],{60:$Vg1,62:$Vh1}),o($Vj1,[2,112],{47:$VW,48:$VX,49:$VY}),o($Vj1,[2,113],{47:$VW,48:$VX,49:$VY}),o($Vk1,[2,114],{49:$VY}),o($Vk1,[2,115],{49:$VY}),o($V51,[2,116]),o($Vl1,[2,117],{45:$VU,46:$VV,47:$VW,48:$VX,49:$VY,52:$V$,53:$V01,54:$V11,55:$V21}),o($Vl1,[2,118],{45:$VU,46:$VV,47:$VW,48:$VX,49:$VY,52:$V$,53:$V01,54:$V11,55:$V21}),o($Vm1,[2,119],{45:$VU,46:$VV,47:$VW,48:$VX,49:$VY}),o($Vm1,[2,120],{45:$VU,46:$VV,47:$VW,48:$VX,49:$VY}),o($Vm1,[2,121],{45:$VU,46:$VV,47:$VW,48:$VX,49:$VY}),o($Vm1,[2,122],{45:$VU,46:$VV,47:$VW,48:$VX,49:$VY}),o([41,56,57,80],[2,123],{45:$VU,46:$VV,47:$VW,48:$VX,49:$VY,50:$VZ,51:$V_,52:$V$,53:$V01,54:$V11,55:$V21}),o([41,57,80],[2,124],{45:$VU,46:$VV,47:$VW,48:$VX,49:$VY,50:$VZ,51:$V_,52:$V$,53:$V01,54:$V11,55:$V21,56:$V31}),o($V51,[2,126]),o($V51,[2,130]),o($V51,[2,131]),o($V9,[2,12]),{14:198,21:[1,199],22:[1,200],33:$V4},{14:201,20:202,29:$Ve1,33:$V4},{30:[1,203]},{8:$Vl,9:$Vm,16:$Vn,25:49,28:204,40:$Vp,58:43,59:$Vq},{18:[1,205]},o($V9,[2,34]),o($V9,[2,35]),{8:$Vl,9:$Vm,16:$Vn,25:49,28:206,40:$Vp,58:43,59:$Vq},{17:$Va1,47:$Vb1,63:207,64:$Vc1},{17:$Va1,47:$Vb1,63:208,64:$Vc1},o($Vi1,[2,75]),o($Vi1,[2,76]),o($V9,[2,13]),{14:209,33:$V4},{14:210,33:$V4},o($V9,[2,16]),{14:211,21:[1,212],22:[1,213],33:$V4},{16:$Vn,25:214},o([29,33],[2,30],{45:$Vw,46:$Vx,47:$Vy,48:$Vz,49:$VA,50:$VB,51:$VC,52:$VD,53:$VE,54:$VF,55:$VG,56:$VH,57:$VI}),{16:$Vn,19:215,25:156,26:157,60:$V5,62:$V6},o($V9,[2,47],{43:216,44:[1,217],45:$Vw,46:$Vx,47:$Vy,48:$Vz,49:$VA,50:$VB,51:$VC,52:$VD,53:$VE,54:$VF,55:$VG,56:$VH,57:$VI}),o($Vi1,[2,70]),o($Vi1,[2,71]),o($V9,[2,14]),o($V9,[2,15]),o($V9,[2,17]),{14:218,33:$V4},{14:219,33:$V4},o([21,22,33],[2,31]),{14:220,20:221,23:222,27:$Vd1,29:$Ve1,33:$V4},o($V9,[2,44]),{8:$Vl,9:$Vm,16:$Vn,25:49,28:223,38:224,39:$Vo,40:$Vp,58:43,59:$Vq},o($V9,[2,18]),o($V9,[2,19]),o($V9,[2,20]),{14:225,21:[1,226],22:[1,227],33:$V4},{14:228,20:229,29:$Ve1,33:$V4},o($V9,[2,45],{45:$Vw,46:$Vx,47:$Vy,48:$Vz,49:$VA,50:$VB,51:$VC,52:$VD,53:$VE,54:$VF,55:$VG,56:$VH,57:$VI}),o($V9,[2,46]),o($V9,[2,21]),{14:230,33:$V4},{14:231,33:$V4},o($V9,[2,24]),{14:232,21:[1,233],22:[1,234],33:$V4},o($V9,[2,22]),o($V9,[2,23]),o($V9,[2,25]),{14:235,33:$V4},{14:236,33:$V4},o($V9,[2,26]),o($V9,[2,27])], defaultActions: {3:[2,2],18:[2,1]}, parseError: function parseError (str, hash) { if (hash.recoverable) { this.trace(str); } else { var error = new Error(str); error.hash = hash; throw error; } }, parse: function parse (input) { var self = this, stack = [0], tstack = [], // token stack vstack = [null], // semantic value stack lstack = [], // location stack table = this.table, yytext = '', yylineno = 0, yyleng = 0, recovering = 0, TERROR = 2, EOF = 1; var args = lstack.slice.call(arguments, 1); //this.reductionCount = this.shiftCount = 0; var lexer = Object.create(this.lexer); var 
sharedState = { yy: {} }; // copy state for (var k in this.yy) { if (Object.prototype.hasOwnProperty.call(this.yy, k)) { sharedState.yy[k] = this.yy[k]; } } lexer.setInput(input, sharedState.yy); sharedState.yy.lexer = lexer; sharedState.yy.parser = this; if (typeof lexer.yylloc == 'undefined') { lexer.yylloc = {}; } var yyloc = lexer.yylloc; lstack.push(yyloc); var ranges = lexer.options && lexer.options.ranges; if (typeof sharedState.yy.parseError === 'function') { this.parseError = sharedState.yy.parseError; } else { this.parseError = Object.getPrototypeOf(this).parseError; } function
(n) { stack.length = stack.length - 2 * n; vstack.length = vstack.length - n; lstack.length = lstack.length - n; } _token_stack: var lex = function () { var token; token = lexer.lex() || EOF; // if token isn't its numeric value, convert if (typeof token !== 'number') { token = self.symbols_[token] || token; } return token; } var symbol, preErrorSymbol, state, action, a, r, yyval = {}, p, len, newState, expected; while (true) { // retreive state number from top of stack state = stack[stack.length - 1]; // use default actions if available if (this.defaultActions[state]) { action = this.defaultActions[state]; } else { if (symbol === null || typeof symbol == 'undefined') { symbol = lex(); } // read action for current state and first input action = table[state] && table[state][symbol]; } _handle_error: // handle parse error if (typeof action === 'undefined' || !action.length || !action[0]) { var error_rule_depth; var errStr = ''; // Return the rule stack depth where the nearest error rule can be found. // Return FALSE when no error recovery rule was found. function locateNearestErrorRecoveryRule(state) { var stack_probe = stack.length - 1; var depth = 0; // try to recover from error for(;;) { // check for error recovery rule in this state if ((TERROR.toString()) in table[state]) { return depth; } if (state === 0 || stack_probe < 2) { return false; // No suitable error recovery rule available. } stack_probe -= 2; // popStack(1): [symbol, action] state = stack[stack_probe]; ++depth; } } if (!recovering) { // first see if there's any chance at hitting an error recovery rule: error_rule_depth = locateNearestErrorRecoveryRule(state); // Report error expected = []; for (p in table[state]) { if (this.terminals_[p] && p > TERROR) { expected.push("'"+this.terminals_[p]+"'"); } } if (lexer.showPosition) { errStr = 'Parse error on line '+(yylineno+1)+":\n"+lexer.showPosition()+"\nExpecting "+expected.join(', ') + ", got '" + (this.terminals_[symbol] || symbol)+ "'"; } else { errStr = 'Parse error on line '+(yylineno+1)+": Unexpected " + (symbol == EOF ? "end of input" : ("'"+(this.terminals_[symbol] || symbol)+"'")); } this.parseError(errStr, { text: lexer.match, token: this.terminals_[symbol] || symbol, line: lexer.yylineno, loc: yyloc, expected: expected, recoverable: (error_rule_depth !== false) }); } else if (preErrorSymbol !== EOF) { error_rule_depth = locateNearestErrorRecoveryRule(state); } // just recovered from another error if (recovering == 3) { if (symbol === EOF || preErrorSymbol === EOF) { throw new Error(errStr || 'Parsing halted while starting to recover from another error.'); } // discard current lookahead and grab another yyleng = lexer.yyleng; yytext = lexer.yytext; yylineno = lexer.yylineno; yyloc = lexer.yylloc; symbol = lex(); } // try to recover from error if (error_rule_depth === false) { throw new Error(errStr || 'Parsing halted. No suitable error recovery rule available.'); } popStack(error_rule_depth); preErrorSymbol = (symbol == TERROR ? 
null : symbol); // save the lookahead token symbol = TERROR; // insert generic error symbol as new lookahead state = stack[stack.length-1]; action = table[state] && table[state][TERROR]; recovering = 3; // allow 3 real symbols to be shifted before reporting a new error } // this shouldn't happen, unless resolve defaults are off if (action[0] instanceof Array && action.length > 1) { throw new Error('Parse Error: multiple actions possible at state: '+state+', token: '+symbol); } switch (action[0]) { case 1: // shift //this.shiftCount++; stack.push(symbol); vstack.push(lexer.yytext); lstack.push(lexer.yylloc); stack.push(action[1]); // push state symbol = null; if (!preErrorSymbol) { // normal execution/no error yyleng = lexer.yyleng; yytext = lexer.yytext; yylineno = lexer.yylineno; yyloc = lexer.yylloc; if (recovering > 0) { recovering--; } } else { // error just occurred, resume old lookahead f/ before error symbol = preErrorSymbol; preErrorSymbol = null; } break; case 2: // reduce //this.reductionCount++; len = this.productions_[action[1]][1]; // perform semantic action yyval.$ = vstack[vstack.length-len]; // default to $$ = $1 // default location, uses first token for firsts, last for lasts yyval._$ = { first_line: lstack[lstack.length-(len||1)].first_line, last_line: lstack[lstack.length-1].last_line, first_column: lstack[lstack.length-(len||1)].first_column, last_column: lstack[lstack.length-1].last_column }; if (ranges) { yyval._$.range = [lstack[lstack.length-(len||1)].range[0], lstack[lstack.length-1].range[1]]; } r = this.performAction.apply(yyval, [yytext, yyleng, yylineno, sharedState.yy, action[1], vstack, lstack].concat(args)); if (typeof r !== 'undefined') { return r; } // pop off stack if (len) { stack = stack.slice(0,-1*len*2); vstack = vstack.slice(0, -1*len); lstack = lstack.slice(0, -1*len); } stack.push(this.productions_[action[1]][0]); // push nonterminal (reduce) vstack.push(yyval.$); lstack.push(yyval._$); // goto new state = table[STATE][NONTERMINAL] newState = table[stack[stack.length-2]][stack[stack.length-1]]; stack.push(newState); break; case 3: // accept return true; } } return true; }}; const {Acceso} = require("../xqueryAST/ExpresionesXpath/Acceso"); const {Aritmetico, operacionAritmetica} = require("../xqueryAST/ExpresionesXpath/Aritmetico"); const {Logica, operacionLogica} = require("../xqueryAST/ExpresionesXpath/Logica"); const {Path} = require("../xqueryAST/ExpresionesXpath/Path"); const {Primitivo, tipoPrimitivo} = require("../xqueryAST/ExpresionesXpath/Primitivo"); const {Relacional, operacionRelacional} = require("../xqueryAST/ExpresionesXpath/Relacional"); const {ClaseError} = require("../xmlAST/ClaseError"); const {For} = require("../xqueryAST/ExpresionesXquery/For"); const {If} = require("../xqueryAST/ExpresionesXquery/If"); const {Let} = require("../xqueryAST/ExpresionesXquery/Let"); const {MultiXpaths} = require("../xqueryAST/ExpresionesXquery/MultiXpaths"); const {Return} = require("../xqueryAST/ExpresionesXquery/Return"); const {XqueryPath} = require("../xqueryAST/ExpresionesXquery/XqueryPath"); var listaErrores = []; var tmp=""; /* generated by jison-lex 0.3.4 */ var lexer = (function(){ var lexer = ({ EOF:1, parseError:function parseError(str, hash) { if (this.yy.parser) { this.yy.parser.parseError(str, hash); } else { throw new Error(str); } }, // resets the lexer, sets new input setInput:function (input, yy) { this.yy = yy || this.yy || {}; this._input = input; this._more = this._backtrack = this.done = false; this.yylineno = this.yyleng = 0; 
this.yytext = this.matched = this.match = ''; this.conditionStack = ['INITIAL']; this.yylloc = { first_line: 1, first_column: 0, last_line: 1, last_column: 0 }; if (this.options.ranges) { this.yylloc.range = [0,0]; } this.offset = 0; return this; }, // consumes and returns one char from the input input:function () { var ch = this._input[0]; this.yytext += ch; this.yyleng++; this.offset++; this.match += ch; this.matched += ch; var lines = ch.match(/(?:\r\n?|\n).*/g); if (lines) { this.yylineno++; this.yylloc.last_line++; } else { this.yylloc.last_column++; } if (this.options.ranges) { this.yylloc.range[1]++; } this._input = this._input.slice(1); return ch; }, // unshifts one char (or a string) into the input unput:function (ch) { var len = ch.length; var lines = ch.split(/(?:\r\n?|\n)/g); this._input = ch + this._input; this.yytext = this.yytext.substr(0, this.yytext.length - len); //this.yyleng -= len; this.offset -= len; var oldLines = this.match.split(/(?:\r\n?|\n)/g); this.match = this.match.substr(0, this.match.length - 1); this.matched = this.matched.substr(0, this.matched.length - 1); if (lines.length - 1) { this.yylineno -= lines.length - 1; } var r = this.yylloc.range; this.yylloc = { first_line: this.yylloc.first_line, last_line: this.yylineno + 1, first_column: this.yylloc.first_column, last_column: lines ? (lines.length === oldLines.length ? this.yylloc.first_column : 0) + oldLines[oldLines.length - lines.length].length - lines[0].length : this.yylloc.first_column - len }; if (this.options.ranges) { this.yylloc.range = [r[0], r[0] + this.yyleng - len]; } this.yyleng = this.yytext.length; return this; }, // When called from action, caches matched text and appends it on next action more:function () { this._more = true; return this; }, // When called from action, signals the lexer that this rule fails to match the input, so the next matching rule (regex) should be tested instead. reject:function () { if (this.options.backtrack_lexer) { this._backtrack = true; } else { return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n' + this.showPosition(), { text: "", token: null, line: this.yylineno }); } return this; }, // retain first n characters of the match less:function (n) { this.unput(this.match.slice(n)); }, // displays already matched input, i.e. for error messages pastInput:function () { var past = this.matched.substr(0, this.matched.length - this.match.length); return (past.length > 20 ? '...':'') + past.substr(-20).replace(/\n/g, ""); }, // displays upcoming input, i.e. for error messages upcomingInput:function () { var next = this.match; if (next.length < 20) { next += this._input.substr(0, 20-next.length); } return (next.substr(0,20) + (next.length > 20 ? '...' : '')).replace(/\n/g, ""); }, // displays the character position where the lexing error occurred, i.e. 
for error messages showPosition:function () { var pre = this.pastInput(); var c = new Array(pre.length + 1).join("-"); return pre + this.upcomingInput() + "\n" + c + "^"; }, // test the lexed token: return FALSE when not a match, otherwise return token test_match:function(match, indexed_rule) { var token, lines, backup; if (this.options.backtrack_lexer) { // save context backup = { yylineno: this.yylineno, yylloc: { first_line: this.yylloc.first_line, last_line: this.last_line, first_column: this.yylloc.first_column, last_column: this.yylloc.last_column }, yytext: this.yytext, match: this.match, matches: this.matches, matched: this.matched, yyleng: this.yyleng, offset: this.offset, _more: this._more, _input: this._input, yy: this.yy, conditionStack: this.conditionStack.slice(0), done: this.done }; if (this.options.ranges) { backup.yylloc.range = this.yylloc.range.slice(0); } } lines = match[0].match(/(?:\r\n?|\n).*/g); if (lines) { this.yylineno += lines.length; } this.yylloc = { first_line: this.yylloc.last_line, last_line: this.yylineno + 1, first_column: this.yylloc.last_column, last_column: lines ? lines[lines.length - 1].length - lines[lines.length - 1].match(/\r?\n?/)[0].length : this.yylloc.last_column + match[0].length }; this.yytext += match[0]; this.match += match[0]; this.matches = match; this.yyleng = this.yytext.length; if (this.options.ranges) { this.yylloc.range = [this.offset, this.offset += this.yyleng]; } this._more = false; this._backtrack = false; this._input = this._input.slice(match[0].length); this.matched += match[0]; token = this.performAction.call(this, this.yy, this, indexed_rule, this.conditionStack[this.conditionStack.length - 1]); if (this.done && this._input) { this.done = false; } if (token) { return token; } else if (this._backtrack) { // recover context for (var k in backup) { this[k] = backup[k]; } return false; // rule action called reject() implying the next rule should be tested instead. } return false; }, // return next match in input next:function () { if (this.done) { return this.EOF; } if (!this._input) { this.done = true; } var token, match, tempMatch, index; if (!this._more) { this.yytext = ''; this.match = ''; } var rules = this._currentRules(); for (var i = 0; i < rules.length; i++) { tempMatch = this._input.match(this.rules[rules[i]]); if (tempMatch && (!match || tempMatch[0].length > match[0].length)) { match = tempMatch; index = i; if (this.options.backtrack_lexer) { token = this.test_match(tempMatch, rules[i]); if (token !== false) { return token; } else if (this._backtrack) { match = false; continue; // rule action called reject() implying a rule MISmatch. } else { // else: this is a lexer rule which consumes input without producing a token (e.g. whitespace) return false; } } else if (!this.options.flex) { break; } } } if (match) { token = this.test_match(match, rules[index]); if (token !== false) { return token; } // else: this is a lexer rule which consumes input without producing a token (e.g. whitespace) return false; } if (this._input === "") { return this.EOF; } else { return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. 
Unrecognized text.\n' + this.showPosition(), { text: "", token: null, line: this.yylineno }); } }, // return next match that has a token lex:function lex () { var r = this.next(); if (r) { return r; } else { return this.lex(); } }, // activates a new lexer condition state (pushes the new lexer condition state onto the condition stack) begin:function begin (condition) { this.conditionStack.push(condition); }, // pop the previously active lexer condition state off the condition stack popState:function popState () { var n = this.conditionStack.length - 1; if (n > 0) { return this.conditionStack.pop(); } else { return this.conditionStack[0]; } }, // produce the lexer rule set which is active for the currently active lexer condition state _currentRules:function _currentRules () { if (this.conditionStack.length && this.conditionStack[this.conditionStack.length - 1]) { return this.conditions[this.conditionStack[this.conditionStack.length - 1]].rules; } else { return this.conditions["INITIAL"].rules; } }, // return the currently active lexer condition state; when an index argument is provided it produces the N-th previous condition state, if available topState:function topState (n) { n = this.conditionStack.length - 1 - Math.abs(n || 0); if (n >= 0) { return this.conditionStack[n]; } else { return "INITIAL"; } }, // alias for begin(condition) pushState:function pushState (condition) { this.begin(condition); }, // return the number of states currently on the stack stateStackSize:function stateStackSize() { return this.conditionStack.length; }, options: {"case-insensitive":true}, performAction: function anonymous(yy,yy_,$avoiding_name_collisions,YY_START) { var YYSTATE=YY_START; switch($avoiding_name_collisions) { case 0:return 59 break; case 1:this.begin('string'); tmp=""; break; case 2:tmp=tmp+yy_.yytext; this.begin('string'); break; case 3:tmp=tmp+yy_.yytext; this.begin('string'); break; case 4:tmp=tmp+yy_.yytext; this.begin('string'); break; case 5:tmp=tmp+yy_.yytext; this.begin('string'); break; case 6:tmp=tmp+yy_.yytext; this.begin('string'); break; case 7: tmp= tmp+yy_.yytext; this.begin('string'); break; case 8: this.begin('INITIAL'); yy_.yytext= tmp; tmp = ""; return 8; break; case 9:this.begin('string'); tmp=""; break; case 10:tmp=tmp+yy_.yytext; this.begin('string'); break; case 11:tmp=tmp+yy_.yytext; this.begin('string'); break; case 12:tmp=tmp+yy_.yytext; this.begin('string'); break; case 13:tmp=tmp+yy_.yytext; this.begin('string'); break; case 14:tmp=tmp+yy_.yytext; this.begin('string'); break; case 15: tmp= tmp+yy_.yytext; this.begin('string'); break; case 16: this.begin('INITIAL'); yy_.yytext= tmp; tmp = ""; return 9; break; case 17:return 62 break; case 18:return 60 break; case 19:return 69 break; case 20:return 68 break; case 21:return 6 break; case 22:return 64 break; case 23:return 78 break; case 24:return 80 break; case 25:return 40 break; case 26:return 41 break; case 27:return 65 break; case 28:return 45 break; case 29:return 46 break; case 30:return 47 break; case 31:return 48 break; case 32:return 50 break; case 33:return 51 break; case 34:return 53 break; case 35:return 55 break; case 36:return 52 break; case 37:return 54 break; case 38:return 57 break; case 39:return 56 break; case 40:return 49 break; case 41: break; case 42: break; case 43:return 73 break; case 44:return 72 break; case 45:return 76 break; case 46:return 74 break; case 47:return 70 break; case 48:return 83 break; case 49:return 82 break; case 50:return 44; break; case 51:return 42; break; case 52:return 
39; break; case 53:return 21; break; case 54:return 22; break; case 55:return 30; break; case 56:return 29; break; case 57:return 27; break; case 58:return 24; break; case 59:return 18; break; case 60:return 15; break; case 61:return 33; break; case 62:return 16; break; case 63:return 31; break; case 64:return 32; break; case 65:return 17 break; case 66:return 5 break; case 67:console.log('Este es un error léxico: ' + yy_.yytext + ', en la linea: ' + yy_.yylloc.first_line + ', en la columna: ' + yy_.yylloc.first_column); break; } }, rules: [/^(?:[0-9]+)/i,/^(?:["])/i,/^(?:[^"])/i,/^(?:[\\][n])/i,/^(?:[\\][t])/i,/^(?:[\\][r])/i,/^(?:[\\]["])/i,/^(?:[\\][\\])/i,/^(?:[\"])/i,/^(?:['])/i,/^(?:[^'])/i,/^(?:[\\][n])/i,/^(?:[\\][t])/i,/^(?:[\\][r])/i,/^(?:[\\]['])/i,/^(?:[\\][\\])/i,/^(?:[\'])/i,/^(?:\/\/)/i,/^(?:\/)/i,/^(?:\.\.)/i,/^(?:\.)/i,/^(?:,)/i,/^(?:@)/i,/^(?:\[)/i,/^(?:\])/i,/^(?:\()/i,/^(?:\))/i,/^(?:\|)/i,/^(?:\+)/i,/^(?:-)/i,/^(?:\*)/i,/^(?:div\b)/i,/^(?:=)/i,/^(?:!=)/i,/^(?:<=)/i,/^(?:>=)/i,/^(?:<)/i,/^(?:>)/i,/^(?:or\b)/i,/^(?:and\b)/i,/^(?:mod\b)/i,/^(?: )/i,/^(?:\n)/i,/^(?:::)/i,/^(?:child\b)/i,/^(?:attribute\b)/i,/^(?:descendant\b)/i,/^(?:text\b)/i,/^(?:last\b)/i,/^(?:position\b)/i,/^(?:else\b)/i,/^(?:then\b)/i,/^(?:if\b)/i,/^(?:ascending\b)/i,/^(?:descending\b)/i,/^(?:by\b)/i,/^(?:order\b)/i,/^(?:where\b)/i,/^(?:at\b)/i,/^(?:in\b)/i,/^(?:for\b)/i,/^(?:return\b)/i,/^(?:\$)/i,/^(?:let\b)/i,/^(?::=)/i,/^(?:[a-zA-Z_][a-zA-Z0-9_ñÑ]*)/i,/^(?:$)/i,/^(?:.)/i], conditions: {"string":{"rules":[0,2,3,4,5,6,7,8,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67],"inclusive":true},"INITIAL":{"rules":[0,1,9,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67],"inclusive":true}} }); return lexer; })(); parser.lexer = lexer; function Parser () { this.yy = {}; } Parser.prototype = parser;parser.Parser = Parser; return new Parser; })(); if (typeof require !== 'undefined' && typeof exports !== 'undefined') { exports.parser = xQueryGrammar; exports.Parser = xQueryGrammar.Parser; exports.parse = function () { return xQueryGrammar.parse.apply(xQueryGrammar, arguments); }; exports.main = function commonjsMain (args) { if (!args[1]) { console.log('Usage: '+args[0]+' FILE'); process.exit(1); } var source = require('fs').readFileSync(require('path').normalize(args[1]), "utf8"); return exports.parser.parse(source); }; if (typeof module !== 'undefined' && require.main === module) { exports.main(process.argv.slice(1)); } }
popStack
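Aside: the generated parser above recovers from a syntax error by scanning down its flattened (symbol, state) stack for the nearest state that can handle the special error token, then popping everything above that state with popStack. Below is a minimal Python sketch of that search-and-pop discipline; the names and the toy action table are illustrative assumptions, not the jison API.

TERROR = 2  # jison's numeric code for the special 'error' token

def locate_nearest_error_recovery_rule(stack, table):
    # walk down the flattened stack in (symbol, state) steps; return how many
    # grammar symbols must be popped to reach a state that handles TERROR,
    # or None when no such state exists
    probe = len(stack) - 1
    depth = 0
    while True:
        state = stack[probe]
        if TERROR in table[state]:
            return depth
        if state == 0 or probe < 2:
            return None
        probe -= 2  # skip one (symbol, state) pair
        depth += 1

def pop_stack(stack, vstack, lstack, n):
    # drop n grammar symbols: 2*n entries from the flattened (symbol, state)
    # stack, n entries each from the value and location stacks
    del stack[len(stack) - 2 * n:]
    del vstack[len(vstack) - n:]
    del lstack[len(lstack) - n:]

# toy run: only state 0 can shift the error token
table = {0: {TERROR: ('shift', 1)}, 1: {}, 2: {}}
stack = [0, 10, 1, 11, 2]      # bottom state 0, then (symbol, state) pairs
vstack = [None, 'a', 'b']
lstack = [None, {}, {}]
depth = locate_nearest_error_recovery_rule(stack, table)
assert depth == 2
pop_stack(stack, vstack, lstack, depth)
assert stack == [0] and vstack == [None] and lstack == [None]

Scanning in steps of two mirrors the flattened stack layout, where each shift pushes a symbol followed by a state.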
pods_test.go
/* Copyright 2016 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package core import ( "testing" "time" "k8s.io/apimachinery/pkg/api/resource" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime/schema" "k8s.io/apimachinery/pkg/util/clock" api "k8s.io/kubernetes/pkg/apis/core" "k8s.io/kubernetes/pkg/quota" "k8s.io/kubernetes/pkg/quota/generic" "k8s.io/kubernetes/pkg/util/node" ) func TestPodConstraintsFunc(t *testing.T)
func TestPodEvaluatorUsage(t *testing.T) { fakeClock := clock.NewFakeClock(time.Now()) evaluator := NewPodEvaluator(nil, fakeClock) // fields use to simulate a pod undergoing termination // note: we set the deletion time in the past now := fakeClock.Now() terminationGracePeriodSeconds := int64(30) deletionTimestampPastGracePeriod := metav1.NewTime(now.Add(time.Duration(terminationGracePeriodSeconds) * time.Second * time.Duration(-2))) deletionTimestampNotPastGracePeriod := metav1.NewTime(fakeClock.Now()) testCases := map[string]struct { pod *api.Pod usage api.ResourceList }{ "init container CPU": { pod: &api.Pod{ Spec: api.PodSpec{ InitContainers: []api.Container{{ Resources: api.ResourceRequirements{ Requests: api.ResourceList{api.ResourceCPU: resource.MustParse("1m")}, Limits: api.ResourceList{api.ResourceCPU: resource.MustParse("2m")}, }, }}, }, }, usage: api.ResourceList{ api.ResourceRequestsCPU: resource.MustParse("1m"), api.ResourceLimitsCPU: resource.MustParse("2m"), api.ResourcePods: resource.MustParse("1"), api.ResourceCPU: resource.MustParse("1m"), generic.ObjectCountQuotaResourceNameFor(schema.GroupResource{Resource: "pods"}): resource.MustParse("1"), }, }, "init container MEM": { pod: &api.Pod{ Spec: api.PodSpec{ InitContainers: []api.Container{{ Resources: api.ResourceRequirements{ Requests: api.ResourceList{api.ResourceMemory: resource.MustParse("1m")}, Limits: api.ResourceList{api.ResourceMemory: resource.MustParse("2m")}, }, }}, }, }, usage: api.ResourceList{ api.ResourceRequestsMemory: resource.MustParse("1m"), api.ResourceLimitsMemory: resource.MustParse("2m"), api.ResourcePods: resource.MustParse("1"), api.ResourceMemory: resource.MustParse("1m"), generic.ObjectCountQuotaResourceNameFor(schema.GroupResource{Resource: "pods"}): resource.MustParse("1"), }, }, "init container local ephemeral storage": { pod: &api.Pod{ Spec: api.PodSpec{ InitContainers: []api.Container{{ Resources: api.ResourceRequirements{ Requests: api.ResourceList{api.ResourceEphemeralStorage: resource.MustParse("32Mi")}, Limits: api.ResourceList{api.ResourceEphemeralStorage: resource.MustParse("64Mi")}, }, }}, }, }, usage: api.ResourceList{ api.ResourceEphemeralStorage: resource.MustParse("32Mi"), api.ResourceRequestsEphemeralStorage: resource.MustParse("32Mi"), api.ResourceLimitsEphemeralStorage: resource.MustParse("64Mi"), api.ResourcePods: resource.MustParse("1"), generic.ObjectCountQuotaResourceNameFor(schema.GroupResource{Resource: "pods"}): resource.MustParse("1"), }, }, "container CPU": { pod: &api.Pod{ Spec: api.PodSpec{ Containers: []api.Container{{ Resources: api.ResourceRequirements{ Requests: api.ResourceList{api.ResourceCPU: resource.MustParse("1m")}, Limits: api.ResourceList{api.ResourceCPU: resource.MustParse("2m")}, }, }}, }, }, usage: api.ResourceList{ api.ResourceRequestsCPU: resource.MustParse("1m"), api.ResourceLimitsCPU: resource.MustParse("2m"), api.ResourcePods: resource.MustParse("1"), api.ResourceCPU: resource.MustParse("1m"), generic.ObjectCountQuotaResourceNameFor(schema.GroupResource{Resource: "pods"}): resource.MustParse("1"), }, }, "container MEM": { pod: &api.Pod{ Spec: api.PodSpec{ Containers: []api.Container{{ Resources: api.ResourceRequirements{ Requests: api.ResourceList{api.ResourceMemory: resource.MustParse("1m")}, Limits: api.ResourceList{api.ResourceMemory: resource.MustParse("2m")}, }, }}, }, }, usage: api.ResourceList{ api.ResourceRequestsMemory: resource.MustParse("1m"), api.ResourceLimitsMemory: resource.MustParse("2m"), api.ResourcePods: 
resource.MustParse("1"), api.ResourceMemory: resource.MustParse("1m"), generic.ObjectCountQuotaResourceNameFor(schema.GroupResource{Resource: "pods"}): resource.MustParse("1"), }, }, "container local ephemeral storage": { pod: &api.Pod{ Spec: api.PodSpec{ Containers: []api.Container{{ Resources: api.ResourceRequirements{ Requests: api.ResourceList{api.ResourceEphemeralStorage: resource.MustParse("32Mi")}, Limits: api.ResourceList{api.ResourceEphemeralStorage: resource.MustParse("64Mi")}, }, }}, }, }, usage: api.ResourceList{ api.ResourceEphemeralStorage: resource.MustParse("32Mi"), api.ResourceRequestsEphemeralStorage: resource.MustParse("32Mi"), api.ResourceLimitsEphemeralStorage: resource.MustParse("64Mi"), api.ResourcePods: resource.MustParse("1"), generic.ObjectCountQuotaResourceNameFor(schema.GroupResource{Resource: "pods"}): resource.MustParse("1"), }, }, "init container maximums override sum of containers": { pod: &api.Pod{ Spec: api.PodSpec{ InitContainers: []api.Container{ { Resources: api.ResourceRequirements{ Requests: api.ResourceList{ api.ResourceCPU: resource.MustParse("4"), api.ResourceMemory: resource.MustParse("100M"), }, Limits: api.ResourceList{ api.ResourceCPU: resource.MustParse("8"), api.ResourceMemory: resource.MustParse("200M"), }, }, }, { Resources: api.ResourceRequirements{ Requests: api.ResourceList{ api.ResourceCPU: resource.MustParse("1"), api.ResourceMemory: resource.MustParse("50M"), }, Limits: api.ResourceList{ api.ResourceCPU: resource.MustParse("2"), api.ResourceMemory: resource.MustParse("100M"), }, }, }, }, Containers: []api.Container{ { Resources: api.ResourceRequirements{ Requests: api.ResourceList{ api.ResourceCPU: resource.MustParse("1"), api.ResourceMemory: resource.MustParse("50M"), }, Limits: api.ResourceList{ api.ResourceCPU: resource.MustParse("2"), api.ResourceMemory: resource.MustParse("100M"), }, }, }, { Resources: api.ResourceRequirements{ Requests: api.ResourceList{ api.ResourceCPU: resource.MustParse("2"), api.ResourceMemory: resource.MustParse("25M"), }, Limits: api.ResourceList{ api.ResourceCPU: resource.MustParse("5"), api.ResourceMemory: resource.MustParse("50M"), }, }, }, }, }, }, usage: api.ResourceList{ api.ResourceRequestsCPU: resource.MustParse("4"), api.ResourceRequestsMemory: resource.MustParse("100M"), api.ResourceLimitsCPU: resource.MustParse("8"), api.ResourceLimitsMemory: resource.MustParse("200M"), api.ResourcePods: resource.MustParse("1"), api.ResourceCPU: resource.MustParse("4"), api.ResourceMemory: resource.MustParse("100M"), generic.ObjectCountQuotaResourceNameFor(schema.GroupResource{Resource: "pods"}): resource.MustParse("1"), }, }, "pod deletion timestamp exceeded": { pod: &api.Pod{ ObjectMeta: metav1.ObjectMeta{ DeletionTimestamp: &deletionTimestampPastGracePeriod, DeletionGracePeriodSeconds: &terminationGracePeriodSeconds, }, Status: api.PodStatus{ Reason: node.NodeUnreachablePodReason, }, Spec: api.PodSpec{ TerminationGracePeriodSeconds: &terminationGracePeriodSeconds, Containers: []api.Container{ { Resources: api.ResourceRequirements{ Requests: api.ResourceList{ api.ResourceCPU: resource.MustParse("1"), api.ResourceMemory: resource.MustParse("50M"), }, Limits: api.ResourceList{ api.ResourceCPU: resource.MustParse("2"), api.ResourceMemory: resource.MustParse("100M"), }, }, }, }, }, }, usage: api.ResourceList{ generic.ObjectCountQuotaResourceNameFor(schema.GroupResource{Resource: "pods"}): resource.MustParse("1"), }, }, "pod deletion timestamp not exceeded": { pod: &api.Pod{ ObjectMeta: metav1.ObjectMeta{ 
DeletionTimestamp: &deletionTimestampNotPastGracePeriod, DeletionGracePeriodSeconds: &terminationGracePeriodSeconds, }, Status: api.PodStatus{ Reason: node.NodeUnreachablePodReason, }, Spec: api.PodSpec{ Containers: []api.Container{ { Resources: api.ResourceRequirements{ Requests: api.ResourceList{ api.ResourceCPU: resource.MustParse("1"), }, Limits: api.ResourceList{ api.ResourceCPU: resource.MustParse("2"), }, }, }, }, }, }, usage: api.ResourceList{ api.ResourceRequestsCPU: resource.MustParse("1"), api.ResourceLimitsCPU: resource.MustParse("2"), api.ResourcePods: resource.MustParse("1"), api.ResourceCPU: resource.MustParse("1"), generic.ObjectCountQuotaResourceNameFor(schema.GroupResource{Resource: "pods"}): resource.MustParse("1"), }, }, } for testName, testCase := range testCases { actual, err := evaluator.Usage(testCase.pod) if err != nil { t.Errorf("%s unexpected error: %v", testName, err) } if !quota.Equals(testCase.usage, actual) { t.Errorf("%s expected: %v, actual: %v", testName, testCase.usage, actual) } } }
{ testCases := map[string]struct { pod *api.Pod required []api.ResourceName err string }{ "init container resource invalid": { pod: &api.Pod{ Spec: api.PodSpec{ InitContainers: []api.Container{{ Resources: api.ResourceRequirements{ Requests: api.ResourceList{api.ResourceCPU: resource.MustParse("2m")}, Limits: api.ResourceList{api.ResourceCPU: resource.MustParse("1m")}, }, }}, }, }, err: `spec.initContainers[0].resources.requests: Invalid value: "2m": must be less than or equal to cpu limit`, }, "container resource invalid": { pod: &api.Pod{ Spec: api.PodSpec{ Containers: []api.Container{{ Resources: api.ResourceRequirements{ Requests: api.ResourceList{api.ResourceCPU: resource.MustParse("2m")}, Limits: api.ResourceList{api.ResourceCPU: resource.MustParse("1m")}, }, }}, }, }, err: `spec.containers[0].resources.requests: Invalid value: "2m": must be less than or equal to cpu limit`, }, "init container resource missing": { pod: &api.Pod{ Spec: api.PodSpec{ InitContainers: []api.Container{{ Resources: api.ResourceRequirements{ Requests: api.ResourceList{api.ResourceCPU: resource.MustParse("1m")}, Limits: api.ResourceList{api.ResourceCPU: resource.MustParse("2m")}, }, }}, }, }, required: []api.ResourceName{api.ResourceMemory}, err: `must specify memory`, }, "container resource missing": { pod: &api.Pod{ Spec: api.PodSpec{ Containers: []api.Container{{ Resources: api.ResourceRequirements{ Requests: api.ResourceList{api.ResourceCPU: resource.MustParse("1m")}, Limits: api.ResourceList{api.ResourceCPU: resource.MustParse("2m")}, }, }}, }, }, required: []api.ResourceName{api.ResourceMemory}, err: `must specify memory`, }, } evaluator := NewPodEvaluator(nil, clock.RealClock{}) for testName, test := range testCases { err := evaluator.Constraints(test.required, test.pod) switch { case err != nil && len(test.err) == 0, err == nil && len(test.err) != 0, err != nil && test.err != err.Error(): t.Errorf("%s unexpected error: %v", testName, err) } } }
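Both Go tests above are table-driven: a map of named cases is fed through the evaluator, and each result is compared against an expected usage map. A minimal Python sketch of the same pattern, with a hypothetical, heavily simplified evaluate_usage standing in for evaluator.Usage:

def evaluate_usage(pod):
    # hypothetical, heavily simplified analog of evaluator.Usage: every pod
    # counts once, and its container requests flow into the usage map
    usage = {"pods": 1}
    for resource, qty in pod.get("requests", {}).items():
        usage["requests." + resource] = qty
    return usage

cases = {
    "container CPU": ({"requests": {"cpu": "1m"}},
                      {"pods": 1, "requests.cpu": "1m"}),
    "no resources":  ({}, {"pods": 1}),
}
for name, (pod, want) in cases.items():
    got = evaluate_usage(pod)
    assert got == want, f"{name}: expected {want}, got {got}"

The payoff of the pattern is the same in both languages: adding a case is one new table entry, and a failure message carries the case name.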
node.py
class
: """Class for storing linked list node.""" def __init__(self, element, next_pointer, prev_pointer=None): self._element = element self._next = next_pointer self._prev = prev_pointer
Node
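For context, a short usage sketch of the completed Node class, wiring two nodes into a doubly linked pair (a list container would normally manage the underscored attributes; they are touched here only for illustration):

tail = Node("b", None)
head = Node("a", tail)
tail._prev = head

assert head._next is tail and tail._prev is head
assert head._element == "a" and tail._element == "b"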
extra_preprocessing.py
#! /usr/bin/python3 """ contains extra preprocessing steps for raw data, including: - using regular expression to capture misclassified Skills in Experience class - separating terms with special characters (e.g. '/', ',') """ from preprocessing.src.utils import * # pylint: disable=all import re import inflect # pylint: disable=all import pandas as pd # pylint: disable=all from pandas.core.common import SettingWithCopyWarning # import warnings filter from warnings import simplefilter simplefilter(action='ignore', category=FutureWarning) simplefilter(action='ignore', category=SettingWithCopyWarning) def get_class_from_tag(full_tag): """ strips the BIO prefix from the tag and returns the class """ if full_tag == 'O': return full_tag return full_tag.split('-')[1] def get_BIO_from_tag(full_tag): """ strips the class from the tag and returns the BIO prefix """ if full_tag == 'O': return full_tag return full_tag.split('-')[0] def identify_misclassified_exp(text): """ identifies whether a span classed as Exp is likely to be a misclassified Skill """ misclassified = True # check if there is a valid number in number format (regex) if bool(re.search('[0-9]', text)): misclassified = False # check if there is a valid number in text format (inflect) inflect_engine = inflect.engine() text_numbers = {inflect_engine.number_to_words(x) for x in range(100)} for token in re.findall(r"[\w]+|[^\s\w]", text): if token.lower() in text_numbers: misclassified = False # check if there is a valid experience time period (base python) time_periods = { "week", "month", "year" } for time_period in time_periods: if bool(re.search(time_period, text.lower())): misclassified = False return misclassified def update_misclassified_tags(input_data, output_data, iloc_span): """ updates the output data with correct tags """ for i in range(iloc_span[0], iloc_span[1]+1): original_tag = str(input_data['tag'].iloc[i]) # print(f"original tag:{original_tag}") if get_BIO_from_tag(original_tag) == 'B': new_tag = 'B-Skill' output_data['tag'].iloc[i] = new_tag elif get_BIO_from_tag(original_tag) == 'I': new_tag = 'I-Skill' output_data['tag'].iloc[i] = new_tag # print(f"new tag: {new_tag}\n") return output_data def capture_misclassified_skills(input_data): """ uses regex to reassign misclassified Skills in Experience class """ output_data = input_data.copy(deep=True) # initialise start and stop index to identify span iloc_span = [0,0] capture = False # iterate over rows in input data
    for row in input_data.itertuples():
        # if capture is off, and tag is B-Experience, set capture to True
        if not capture and row.tag == "B-Experience":
            capture = True
            iloc_span[0] = row.Index
        # if capture is on, and tag is not I-Experience, close and test the span
        elif capture and row.tag != "I-Experience":
            capture = False
            iloc_span[1] = row.Index - 1
            text = " ".join(list(input_data['word'].iloc[iloc_span[0]:iloc_span[1]+1]))
            # identify if misclassified
            if identify_misclassified_exp(text):
                # if misclassified, retag the span in output_data as B-Skill/I-Skill
                output_data = update_misclassified_tags(input_data, output_data, iloc_span)

    # if capture is still on, check misclassification one more time (for the final span)
    if capture:
        iloc_span[1] = len(input_data.index) - 1
        text = " ".join(list(input_data['word'].iloc[iloc_span[0]:iloc_span[1]+1]))
        if identify_misclassified_exp(text):
            output_data = update_misclassified_tags(input_data, output_data, iloc_span)

    return output_data


def split_spans_by_character(input_data, output_data, iloc_span,
                             punctuation={"/", "\\", ",", ".", ":", ";", "?", "!"}):
    """
    splits spans by special characters and reclassifies accordingly
    """
    # iloc_span is inclusive on both ends
    span_dict = {
        x: input_data['word'].iloc[x]
        for x in range(iloc_span[0], iloc_span[1] + 1)
    }
    special_character_indices = [
        index for index, value in span_dict.items() if value in punctuation
    ]
    # set tags of special characters to O
    # set BIO prefix of the subsequent token (if one exists) to B
    for special_character_index in special_character_indices:
        output_data['tag'].iloc[special_character_index] = 'O'
        if special_character_index < iloc_span[1]:
            tag = get_class_from_tag(input_data['tag'].iloc[special_character_index + 1])
            if output_data['tag'].iloc[special_character_index + 1] != 'O':
                output_data['tag'].iloc[special_character_index + 1] = 'B-' + tag
    return output_data


def separate_terms(input_data):
    """
    separates terms with special characters
    """
    output_data = input_data.copy(deep=True)
    # initialise start and stop index to identify span
    iloc_span = [0, 0]
    current_tag = None
    capture = False
    # iterate over rows in input data
    for row in input_data.itertuples():
        prefix = get_BIO_from_tag(row.tag)
        tag = get_class_from_tag(row.tag)
        # if capture is off, and tag begins 'B', set capture to True and current_tag to current
        if not capture and prefix == 'B':
            capture = True
            current_tag = tag
            iloc_span[0] = row.Index
        # if capture is on, and tag differs from current_tag, close the span
        elif capture and tag != current_tag:
            capture = False
            iloc_span[1] = row.Index - 1
            output_data = split_spans_by_character(input_data, output_data, iloc_span)
    # if capture is still on, process the final span
    if capture:
        iloc_span[1] = len(input_data.index) - 1
        output_data = split_spans_by_character(input_data, output_data, iloc_span)
    return output_data


def extra_preprocessing(input_data):
    """
    combines the above preprocessing into one function call
    """
    output_data = input_data.copy(deep=True)
    output_data = capture_misclassified_skills(output_data)
    output_data = separate_terms(output_data)
    return output_data
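A quick end-to-end sketch of extra_preprocessing on a hypothetical four-token frame, assuming inflect is installed and a pandas version where the module's chained .iloc assignments write through: the Experience span contains no digits or time words, so it is reassigned to Skill, and the comma then splits it into two separate Skill spans.

import pandas as pd

data = pd.DataFrame({
    "word": ["Python", ",", "Java", "required"],
    "tag":  ["B-Experience", "I-Experience", "I-Experience", "O"],
})
result = extra_preprocessing(data)
# expected tags after both passes:
assert list(result["tag"]) == ["B-Skill", "O", "B-Skill", "O"]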
test_args.py
"""Test whether all elements of cls.args are instances of Basic. """ # NOTE: keep tests sorted by (module, class name) key. If a class can't # be instantiated, add it here anyway with @SKIP("abstract class) (see # e.g. Function). import os import re import warnings import io from sympy import Basic, S, symbols, sqrt, sin, oo, Interval, exp from sympy.core.compatibility import range from sympy.utilities.pytest import XFAIL, SKIP from sympy.utilities.exceptions import SymPyDeprecationWarning x, y, z = symbols('x,y,z') def test_all_classes_are_tested(): this = os.path.split(__file__)[0] path = os.path.join(this, os.pardir, os.pardir) sympy_path = os.path.abspath(path) prefix = os.path.split(sympy_path)[0] + os.sep re_cls = re.compile("^class ([A-Za-z][A-Za-z0-9_]*)\s*\(", re.MULTILINE) modules = {} for root, dirs, files in os.walk(sympy_path): module = root.replace(prefix, "").replace(os.sep, ".") for file in files: if file.startswith(("_", "test_", "bench_")): continue if not file.endswith(".py"): continue with io.open(os.path.join(root, file), "r", encoding='utf-8') as f: text = f.read() submodule = module + '.' + file[:-3] names = re_cls.findall(text) if not names: continue try: mod = __import__(submodule, fromlist=names) except ImportError: continue def is_Basic(name): cls = getattr(mod, name) return issubclass(cls, Basic) names = list(filter(is_Basic, names)) if names: modules[submodule] = names ns = globals() failed = [] for module, names in modules.items(): mod = module.replace('.', '__') for name in names: test = 'test_' + mod + '__' + name if test not in ns: failed.append(module + '.' + name) # reset all SymPyDeprecationWarning into errors warnings.simplefilter("error", category=SymPyDeprecationWarning) assert not failed, "Missing classes: %s. Please add tests for these to sympy/core/tests/test_args.py." 
% ", ".join(failed) def _test_args(obj): return all(isinstance(arg, Basic) for arg in obj.args) def test_sympy__assumptions__assume__AppliedPredicate(): from sympy.assumptions.assume import AppliedPredicate, Predicate assert _test_args(AppliedPredicate(Predicate("test"), 2)) def test_sympy__assumptions__assume__Predicate(): from sympy.assumptions.assume import Predicate assert _test_args(Predicate("test")) @XFAIL def test_sympy__combinatorics__graycode__GrayCode(): from sympy.combinatorics.graycode import GrayCode # an integer is given and returned from GrayCode as the arg assert _test_args(GrayCode(3, start='100')) assert _test_args(GrayCode(3, rank=1)) def test_sympy__combinatorics__subsets__Subset(): from sympy.combinatorics.subsets import Subset assert _test_args(Subset([0, 1], [0, 1, 2, 3])) assert _test_args(Subset(['c', 'd'], ['a', 'b', 'c', 'd'])) @XFAIL def test_sympy__combinatorics__permutations__Permutation(): from sympy.combinatorics.permutations import Permutation assert _test_args(Permutation([0, 1, 2, 3])) def test_sympy__combinatorics__perm_groups__PermutationGroup(): from sympy.combinatorics.permutations import Permutation from sympy.combinatorics.perm_groups import PermutationGroup assert _test_args(PermutationGroup([Permutation([0, 1])])) def test_sympy__combinatorics__polyhedron__Polyhedron(): from sympy.combinatorics.permutations import Permutation from sympy.combinatorics.polyhedron import Polyhedron from sympy.abc import w, x, y, z pgroup = [Permutation([[0, 1, 2], [3]]), Permutation([[0, 1, 3], [2]]), Permutation([[0, 2, 3], [1]]), Permutation([[1, 2, 3], [0]]), Permutation([[0, 1], [2, 3]]), Permutation([[0, 2], [1, 3]]), Permutation([[0, 3], [1, 2]]), Permutation([[0, 1, 2, 3]])] corners = [w, x, y, z] faces = [(w, x, y), (w, y, z), (w, z, x), (x, y, z)] assert _test_args(Polyhedron(corners, faces, pgroup)) @XFAIL def test_sympy__combinatorics__prufer__Prufer(): from sympy.combinatorics.prufer import Prufer assert _test_args(Prufer([[0, 1], [0, 2], [0, 3]], 4)) def test_sympy__combinatorics__partitions__Partition(): from sympy.combinatorics.partitions import Partition assert _test_args(Partition([1])) @XFAIL def test_sympy__combinatorics__partitions__IntegerPartition(): from sympy.combinatorics.partitions import IntegerPartition assert _test_args(IntegerPartition([1])) def test_sympy__concrete__products__Product(): from sympy.concrete.products import Product assert _test_args(Product(x, (x, 0, 10))) assert _test_args(Product(x, (x, 0, y), (y, 0, 10))) @SKIP("abstract Class") def test_sympy__concrete__expr_with_limits__ExprWithLimits(): from sympy.concrete.expr_with_limits import ExprWithLimits assert _test_args(ExprWithLimits(x, (x, 0, 10))) assert _test_args(ExprWithLimits(x*y, (x, 0, 10.),(y,1.,3))) @SKIP("abstract Class") def test_sympy__concrete__expr_with_limits__AddWithLimits(): from sympy.concrete.expr_with_limits import AddWithLimits assert _test_args(AddWithLimits(x, (x, 0, 10))) assert _test_args(AddWithLimits(x*y, (x, 0, 10),(y,1,3))) @SKIP("abstract Class") def test_sympy__concrete__expr_with_intlimits__ExprWithIntLimits(): from sympy.concrete.expr_with_intlimits import ExprWithIntLimits assert _test_args(ExprWithIntLimits(x, (x, 0, 10))) assert _test_args(ExprWithIntLimits(x*y, (x, 0, 10),(y,1,3))) def test_sympy__concrete__summations__Sum(): from sympy.concrete.summations import Sum assert _test_args(Sum(x, (x, 0, 10))) assert _test_args(Sum(x, (x, 0, y), (y, 0, 10))) def test_sympy__core__add__Add(): from sympy.core.add import Add assert 
_test_args(Add(x, y, z, 2)) def test_sympy__core__basic__Atom(): from sympy.core.basic import Atom assert _test_args(Atom()) def test_sympy__core__basic__Basic(): from sympy.core.basic import Basic assert _test_args(Basic()) def test_sympy__core__containers__Dict(): from sympy.core.containers import Dict assert _test_args(Dict({x: y, y: z})) def test_sympy__core__containers__Tuple(): from sympy.core.containers import Tuple assert _test_args(Tuple(x, y, z, 2)) def test_sympy__core__expr__AtomicExpr(): from sympy.core.expr import AtomicExpr assert _test_args(AtomicExpr()) def test_sympy__core__expr__Expr(): from sympy.core.expr import Expr assert _test_args(Expr()) def test_sympy__core__function__Application(): from sympy.core.function import Application assert _test_args(Application(1, 2, 3)) def test_sympy__core__function__AppliedUndef(): from sympy.core.function import AppliedUndef assert _test_args(AppliedUndef(1, 2, 3)) def test_sympy__core__function__Derivative(): from sympy.core.function import Derivative assert _test_args(Derivative(2, x, y, 3)) @SKIP("abstract class") def test_sympy__core__function__Function(): pass def test_sympy__core__function__Lambda(): from sympy.core.function import Lambda assert _test_args(Lambda((x, y), x + y + z)) def test_sympy__core__function__Subs(): from sympy.core.function import Subs assert _test_args(Subs(x + y, x, 2)) def test_sympy__core__function__WildFunction(): from sympy.core.function import WildFunction assert _test_args(WildFunction('f')) def test_sympy__core__mod__Mod(): from sympy.core.mod import Mod assert _test_args(Mod(x, 2)) def test_sympy__core__mul__Mul(): from sympy.core.mul import Mul assert _test_args(Mul(2, x, y, z)) def test_sympy__core__numbers__Catalan(): from sympy.core.numbers import Catalan assert _test_args(Catalan()) def test_sympy__core__numbers__ComplexInfinity(): from sympy.core.numbers import ComplexInfinity assert _test_args(ComplexInfinity()) def test_sympy__core__numbers__EulerGamma(): from sympy.core.numbers import EulerGamma assert _test_args(EulerGamma()) def test_sympy__core__numbers__Exp1(): from sympy.core.numbers import Exp1 assert _test_args(Exp1()) def test_sympy__core__numbers__Float(): from sympy.core.numbers import Float assert _test_args(Float(1.23)) def test_sympy__core__numbers__GoldenRatio(): from sympy.core.numbers import GoldenRatio assert _test_args(GoldenRatio()) def test_sympy__core__numbers__Half(): from sympy.core.numbers import Half assert _test_args(Half()) def test_sympy__core__numbers__ImaginaryUnit(): from sympy.core.numbers import ImaginaryUnit assert _test_args(ImaginaryUnit()) def test_sympy__core__numbers__Infinity(): from sympy.core.numbers import Infinity assert _test_args(Infinity()) def test_sympy__core__numbers__Integer(): from sympy.core.numbers import Integer assert _test_args(Integer(7)) @SKIP("abstract class") def test_sympy__core__numbers__IntegerConstant(): pass def test_sympy__core__numbers__NaN(): from sympy.core.numbers import NaN assert _test_args(NaN()) def test_sympy__core__numbers__NegativeInfinity(): from sympy.core.numbers import NegativeInfinity assert _test_args(NegativeInfinity()) def test_sympy__core__numbers__NegativeOne(): from sympy.core.numbers import NegativeOne assert _test_args(NegativeOne()) def test_sympy__core__numbers__Number(): from sympy.core.numbers import Number assert _test_args(Number(1, 7)) def test_sympy__core__numbers__NumberSymbol(): from sympy.core.numbers import NumberSymbol assert _test_args(NumberSymbol()) def 
test_sympy__core__numbers__One(): from sympy.core.numbers import One assert _test_args(One()) def test_sympy__core__numbers__Pi(): from sympy.core.numbers import Pi assert _test_args(Pi()) def test_sympy__core__numbers__Rational(): from sympy.core.numbers import Rational assert _test_args(Rational(1, 7)) @SKIP("abstract class") def test_sympy__core__numbers__RationalConstant(): pass def test_sympy__core__numbers__Zero(): from sympy.core.numbers import Zero assert _test_args(Zero()) @SKIP("abstract class") def test_sympy__core__operations__AssocOp(): pass @SKIP("abstract class") def test_sympy__core__operations__LatticeOp(): pass def test_sympy__core__power__Pow(): from sympy.core.power import Pow assert _test_args(Pow(x, 2)) def test_sympy__core__relational__Equality(): from sympy.core.relational import Equality assert _test_args(Equality(x, 2)) def test_sympy__core__relational__GreaterThan(): from sympy.core.relational import GreaterThan assert _test_args(GreaterThan(x, 2)) def test_sympy__core__relational__LessThan(): from sympy.core.relational import LessThan assert _test_args(LessThan(x, 2)) @SKIP("abstract class") def test_sympy__core__relational__Relational(): pass def test_sympy__core__relational__StrictGreaterThan(): from sympy.core.relational import StrictGreaterThan assert _test_args(StrictGreaterThan(x, 2)) def test_sympy__core__relational__StrictLessThan(): from sympy.core.relational import StrictLessThan assert _test_args(StrictLessThan(x, 2)) def test_sympy__core__relational__Unequality(): from sympy.core.relational import Unequality assert _test_args(Unequality(x, 2)) def test_sympy__sets__sets__EmptySet(): from sympy.sets.sets import EmptySet assert _test_args(EmptySet()) def test_sympy__sets__sets__UniversalSet(): from sympy.sets.sets import UniversalSet assert _test_args(UniversalSet()) def test_sympy__sets__sets__FiniteSet(): from sympy.sets.sets import FiniteSet assert _test_args(FiniteSet(x, y, z)) def test_sympy__sets__sets__Interval(): from sympy.sets.sets import Interval assert _test_args(Interval(0, 1)) def test_sympy__sets__sets__ProductSet(): from sympy.sets.sets import ProductSet, Interval assert _test_args(ProductSet(Interval(0, 1), Interval(0, 1))) @SKIP("does it make sense to test this?") def test_sympy__sets__sets__Set(): from sympy.sets.sets import Set assert _test_args(Set()) def test_sympy__sets__sets__Intersection(): from sympy.sets.sets import Intersection, Interval assert _test_args(Intersection(Interval(0, 3), Interval(2, 4), evaluate=False)) def test_sympy__sets__sets__Union(): from sympy.sets.sets import Union, Interval assert _test_args(Union(Interval(0, 1), Interval(2, 3))) def test_sympy__sets__sets__Complement(): from sympy.sets.sets import Complement assert _test_args(Complement(Interval(0, 2), Interval(0, 1))) def test_sympy__sets__sets__SymmetricDifference(): from sympy.sets.sets import FiniteSet, SymmetricDifference assert _test_args(SymmetricDifference(FiniteSet(1, 2, 3), \ FiniteSet(2, 3, 4))) def test_sympy__core__trace__Tr(): from sympy.core.trace import Tr a, b = symbols('a b') assert _test_args(Tr(a + b)) def test_sympy__sets__fancysets__Naturals(): from sympy.sets.fancysets import Naturals assert _test_args(Naturals()) def test_sympy__sets__fancysets__Naturals0(): from sympy.sets.fancysets import Naturals0 assert _test_args(Naturals0()) def test_sympy__sets__fancysets__Integers(): from sympy.sets.fancysets import Integers assert _test_args(Integers()) def test_sympy__sets__fancysets__Reals(): from sympy.sets.fancysets import Reals 
assert _test_args(Reals()) def test_sympy__sets__fancysets__ImageSet(): from sympy.sets.fancysets import ImageSet from sympy import S, Lambda, Symbol x = Symbol('x') assert _test_args(ImageSet(Lambda(x, x**2), S.Naturals)) def test_sympy__sets__fancysets__Range(): from sympy.sets.fancysets import Range assert _test_args(Range(1, 5, 1)) def test_sympy__sets__contains__Contains(): from sympy.sets.fancysets import Range from sympy.sets.contains import Contains assert _test_args(Contains(x, Range(0, 10, 2))) # STATS from sympy.stats.crv_types import NormalDistribution nd = NormalDistribution(0, 1) from sympy.stats.frv_types import DieDistribution die = DieDistribution(6) def test_sympy__stats__crv__ContinuousDomain(): from sympy.stats.crv import ContinuousDomain assert _test_args(ContinuousDomain(set([x]), Interval(-oo, oo))) def test_sympy__stats__crv__SingleContinuousDomain(): from sympy.stats.crv import SingleContinuousDomain assert _test_args(SingleContinuousDomain(x, Interval(-oo, oo))) def test_sympy__stats__crv__ProductContinuousDomain(): from sympy.stats.crv import SingleContinuousDomain, ProductContinuousDomain D = SingleContinuousDomain(x, Interval(-oo, oo)) E = SingleContinuousDomain(y, Interval(0, oo)) assert _test_args(ProductContinuousDomain(D, E)) def test_sympy__stats__crv__ConditionalContinuousDomain(): from sympy.stats.crv import (SingleContinuousDomain, ConditionalContinuousDomain) D = SingleContinuousDomain(x, Interval(-oo, oo)) assert _test_args(ConditionalContinuousDomain(D, x > 0)) def test_sympy__stats__crv__ContinuousPSpace(): from sympy.stats.crv import ContinuousPSpace, SingleContinuousDomain D = SingleContinuousDomain(x, Interval(-oo, oo)) assert _test_args(ContinuousPSpace(D, nd)) def test_sympy__stats__crv__SingleContinuousPSpace(): from sympy.stats.crv import SingleContinuousPSpace assert _test_args(SingleContinuousPSpace(x, nd)) def test_sympy__stats__crv__ProductContinuousPSpace(): from sympy.stats.crv import ProductContinuousPSpace, SingleContinuousPSpace A = SingleContinuousPSpace(x, nd) B = SingleContinuousPSpace(y, nd) assert _test_args(ProductContinuousPSpace(A, B)) @SKIP("abstract class") def test_sympy__stats__crv__SingleContinuousDistribution(): pass def test_sympy__stats__drv__SingleDiscreteDomain(): from sympy.stats.drv import SingleDiscreteDomain assert _test_args(SingleDiscreteDomain(x, S.Naturals)) def test_sympy__stats__drv__SingleDiscretePSpace(): from sympy.stats.drv import SingleDiscretePSpace from sympy.stats.drv_types import PoissonDistribution assert _test_args(SingleDiscretePSpace(x, PoissonDistribution(1))) @SKIP("abstract class") def test_sympy__stats__drv__SingleDiscreteDistribution(): pass def test_sympy__stats__rv__RandomDomain(): from sympy.stats.rv import RandomDomain from sympy.sets.sets import FiniteSet assert _test_args(RandomDomain(FiniteSet(x), FiniteSet(1, 2, 3))) def test_sympy__stats__rv__SingleDomain(): from sympy.stats.rv import SingleDomain from sympy.sets.sets import FiniteSet assert _test_args(SingleDomain(x, FiniteSet(1, 2, 3))) def test_sympy__stats__rv__ConditionalDomain(): from sympy.stats.rv import ConditionalDomain, RandomDomain from sympy.sets.sets import FiniteSet D = RandomDomain(FiniteSet(x), FiniteSet(1, 2)) assert _test_args(ConditionalDomain(D, x > 1)) def test_sympy__stats__rv__PSpace(): from sympy.stats.rv import PSpace, RandomDomain from sympy import FiniteSet D = RandomDomain(FiniteSet(x), FiniteSet(1, 2, 3, 4, 5, 6)) assert _test_args(PSpace(D, die)) @SKIP("abstract Class") def 
test_sympy__stats__rv__SinglePSpace(): pass def test_sympy__stats__rv__RandomSymbol(): from sympy.stats.rv import RandomSymbol from sympy.stats.crv import SingleContinuousPSpace A = SingleContinuousPSpace(x, nd) assert _test_args(RandomSymbol(A, x)) def test_sympy__stats__rv__ProductPSpace(): from sympy.stats.rv import ProductPSpace from sympy.stats.crv import SingleContinuousPSpace A = SingleContinuousPSpace(x, nd) B = SingleContinuousPSpace(y, nd) assert _test_args(ProductPSpace(A, B)) def test_sympy__stats__rv__ProductDomain(): from sympy.stats.rv import ProductDomain, SingleDomain D = SingleDomain(x, Interval(-oo, oo)) E = SingleDomain(y, Interval(0, oo)) assert _test_args(ProductDomain(D, E)) def test_sympy__stats__frv_types__DiscreteUniformDistribution(): from sympy.stats.frv_types import DiscreteUniformDistribution from sympy.core.containers import Tuple assert _test_args(DiscreteUniformDistribution(Tuple(*list(range(6))))) def test_sympy__stats__frv_types__DieDistribution(): from sympy.stats.frv_types import DieDistribution assert _test_args(DieDistribution(6)) def test_sympy__stats__frv_types__BernoulliDistribution(): from sympy.stats.frv_types import BernoulliDistribution assert _test_args(BernoulliDistribution(S.Half, 0, 1)) def test_sympy__stats__frv_types__BinomialDistribution(): from sympy.stats.frv_types import BinomialDistribution assert _test_args(BinomialDistribution(5, S.Half, 1, 0)) def test_sympy__stats__frv_types__HypergeometricDistribution(): from sympy.stats.frv_types import HypergeometricDistribution assert _test_args(HypergeometricDistribution(10, 5, 3)) def test_sympy__stats__frv_types__RademacherDistribution(): from sympy.stats.frv_types import RademacherDistribution assert _test_args(RademacherDistribution()) def test_sympy__stats__frv__FiniteDomain(): from sympy.stats.frv import FiniteDomain assert _test_args(FiniteDomain(set([(x, 1), (x, 2)]))) # x can be 1 or 2 def test_sympy__stats__frv__SingleFiniteDomain(): from sympy.stats.frv import SingleFiniteDomain assert _test_args(SingleFiniteDomain(x, set([1, 2]))) # x can be 1 or 2 def test_sympy__stats__frv__ProductFiniteDomain(): from sympy.stats.frv import SingleFiniteDomain, ProductFiniteDomain xd = SingleFiniteDomain(x, set([1, 2])) yd = SingleFiniteDomain(y, set([1, 2])) assert _test_args(ProductFiniteDomain(xd, yd)) def test_sympy__stats__frv__ConditionalFiniteDomain(): from sympy.stats.frv import SingleFiniteDomain, ConditionalFiniteDomain xd = SingleFiniteDomain(x, set([1, 2])) assert _test_args(ConditionalFiniteDomain(xd, x > 1)) def test_sympy__stats__frv__FinitePSpace(): from sympy.stats.frv import FinitePSpace, SingleFiniteDomain xd = SingleFiniteDomain(x, set([1, 2, 3, 4, 5, 6])) p = 1.0/6 xd = SingleFiniteDomain(x, set([1, 2])) assert _test_args(FinitePSpace(xd, {(x, 1): S.Half, (x, 2): S.Half})) def test_sympy__stats__frv__SingleFinitePSpace(): from sympy.stats.frv import SingleFinitePSpace from sympy import Symbol assert _test_args(SingleFinitePSpace(Symbol('x'), die)) def test_sympy__stats__frv__ProductFinitePSpace(): from sympy.stats.frv import SingleFinitePSpace, ProductFinitePSpace from sympy import Symbol xp = SingleFinitePSpace(Symbol('x'), die) yp = SingleFinitePSpace(Symbol('y'), die) assert _test_args(ProductFinitePSpace(xp, yp)) @SKIP("abstract class") def test_sympy__stats__frv__SingleFiniteDistribution(): pass @SKIP("abstract class") def test_sympy__stats__crv__ContinuousDistribution(): pass def test_sympy__stats__frv_types__FiniteDistributionHandmade(): from sympy.stats.frv_types 
    import FiniteDistributionHandmade
    assert _test_args(FiniteDistributionHandmade({1: 1}))

def test_sympy__stats__crv__ContinuousDistributionHandmade():
    from sympy.stats.crv import ContinuousDistributionHandmade
    from sympy import Symbol, Interval
    assert _test_args(ContinuousDistributionHandmade(Symbol('x'), Interval(0, 2)))

def test_sympy__stats__rv__Density():
    from sympy.stats.rv import Density
    from sympy.stats.crv_types import Normal
    assert _test_args(Density(Normal('x', 0, 1)))

def test_sympy__stats__crv_types__ArcsinDistribution():
    from sympy.stats.crv_types import ArcsinDistribution
    assert _test_args(ArcsinDistribution(0, 1))

def test_sympy__stats__crv_types__BeniniDistribution():
    from sympy.stats.crv_types import BeniniDistribution
    assert _test_args(BeniniDistribution(1, 1, 1))

def test_sympy__stats__crv_types__BetaDistribution():
    from sympy.stats.crv_types import BetaDistribution
    assert _test_args(BetaDistribution(1, 1))

def test_sympy__stats__crv_types__BetaPrimeDistribution():
    from sympy.stats.crv_types import BetaPrimeDistribution
    assert _test_args(BetaPrimeDistribution(1, 1))

def test_sympy__stats__crv_types__CauchyDistribution():
    from sympy.stats.crv_types import CauchyDistribution
    assert _test_args(CauchyDistribution(0, 1))

def test_sympy__stats__crv_types__ChiDistribution():
    from sympy.stats.crv_types import ChiDistribution
    assert _test_args(ChiDistribution(1))

def test_sympy__stats__crv_types__ChiNoncentralDistribution():
    from sympy.stats.crv_types import ChiNoncentralDistribution
    assert _test_args(ChiNoncentralDistribution(1, 1))

def test_sympy__stats__crv_types__ChiSquaredDistribution():
    from sympy.stats.crv_types import ChiSquaredDistribution
    assert _test_args(ChiSquaredDistribution(1))

def test_sympy__stats__crv_types__DagumDistribution():
    from sympy.stats.crv_types import DagumDistribution
    assert _test_args(DagumDistribution(1, 1, 1))

def test_sympy__stats__crv_types__ExponentialDistribution():
    from sympy.stats.crv_types import ExponentialDistribution
    assert _test_args(ExponentialDistribution(1))

def test_sympy__stats__crv_types__FDistributionDistribution():
    from sympy.stats.crv_types import FDistributionDistribution
    assert _test_args(FDistributionDistribution(1, 1))

def test_sympy__stats__crv_types__FisherZDistribution():
    from sympy.stats.crv_types import FisherZDistribution
    assert _test_args(FisherZDistribution(1, 1))

def test_sympy__stats__crv_types__FrechetDistribution():
    from sympy.stats.crv_types import FrechetDistribution
    assert _test_args(FrechetDistribution(1, 1, 1))

def test_sympy__stats__crv_types__GammaInverseDistribution():
    from sympy.stats.crv_types import GammaInverseDistribution
    assert _test_args(GammaInverseDistribution(1, 1))

def test_sympy__stats__crv_types__GammaDistribution():
    from sympy.stats.crv_types import GammaDistribution
    assert _test_args(GammaDistribution(1, 1))

def test_sympy__stats__crv_types__KumaraswamyDistribution():
    from sympy.stats.crv_types import KumaraswamyDistribution
    assert _test_args(KumaraswamyDistribution(1, 1))

def test_sympy__stats__crv_types__LaplaceDistribution():
    from sympy.stats.crv_types import LaplaceDistribution
    assert _test_args(LaplaceDistribution(0, 1))

def test_sympy__stats__crv_types__LogisticDistribution():
    from sympy.stats.crv_types import LogisticDistribution
    assert _test_args(LogisticDistribution(0, 1))

def test_sympy__stats__crv_types__LogNormalDistribution():
    from sympy.stats.crv_types import LogNormalDistribution
    assert _test_args(LogNormalDistribution(0, 1))

def test_sympy__stats__crv_types__MaxwellDistribution():
    from sympy.stats.crv_types import MaxwellDistribution
    assert _test_args(MaxwellDistribution(1))

def test_sympy__stats__crv_types__NakagamiDistribution():
    from sympy.stats.crv_types import NakagamiDistribution
    assert _test_args(NakagamiDistribution(1, 1))

def test_sympy__stats__crv_types__NormalDistribution():
    from sympy.stats.crv_types import NormalDistribution
    assert _test_args(NormalDistribution(0, 1))

def test_sympy__stats__crv_types__ParetoDistribution():
    from sympy.stats.crv_types import ParetoDistribution
    assert _test_args(ParetoDistribution(1, 1))

def test_sympy__stats__crv_types__QuadraticUDistribution():
    from sympy.stats.crv_types import QuadraticUDistribution
    assert _test_args(QuadraticUDistribution(1, 2))

def test_sympy__stats__crv_types__RaisedCosineDistribution():
    from sympy.stats.crv_types import RaisedCosineDistribution
    assert _test_args(RaisedCosineDistribution(1, 1))

def test_sympy__stats__crv_types__RayleighDistribution():
    from sympy.stats.crv_types import RayleighDistribution
    assert _test_args(RayleighDistribution(1))

def test_sympy__stats__crv_types__StudentTDistribution():
    from sympy.stats.crv_types import StudentTDistribution
    assert _test_args(StudentTDistribution(1))

def test_sympy__stats__crv_types__TriangularDistribution():
    from sympy.stats.crv_types import TriangularDistribution
    assert _test_args(TriangularDistribution(-1, 0, 1))

def test_sympy__stats__crv_types__UniformDistribution():
    from sympy.stats.crv_types import UniformDistribution
    assert _test_args(UniformDistribution(0, 1))

def test_sympy__stats__crv_types__UniformSumDistribution():
    from sympy.stats.crv_types import UniformSumDistribution
    assert _test_args(UniformSumDistribution(1))

def test_sympy__stats__crv_types__VonMisesDistribution():
    from sympy.stats.crv_types import VonMisesDistribution
    assert _test_args(VonMisesDistribution(1, 1))

def test_sympy__stats__crv_types__WeibullDistribution():
    from sympy.stats.crv_types import WeibullDistribution
    assert _test_args(WeibullDistribution(1, 1))

def test_sympy__stats__crv_types__WignerSemicircleDistribution():
    from sympy.stats.crv_types import WignerSemicircleDistribution
    assert _test_args(WignerSemicircleDistribution(1))

def test_sympy__stats__drv_types__PoissonDistribution():
    from sympy.stats.drv_types import PoissonDistribution
    assert _test_args(PoissonDistribution(1))

def test_sympy__stats__drv_types__GeometricDistribution():
    from sympy.stats.drv_types import GeometricDistribution
    assert _test_args(GeometricDistribution(.5))

def test_sympy__core__symbol__Dummy():
    from sympy.core.symbol import Dummy
    assert _test_args(Dummy('t'))

def test_sympy__core__symbol__Symbol():
    from sympy.core.symbol import Symbol
    assert _test_args(Symbol('t'))

def test_sympy__core__symbol__Wild():
    from sympy.core.symbol import Wild
    assert _test_args(Wild('x', exclude=[x]))

@SKIP("abstract class")
def test_sympy__functions__combinatorial__factorials__CombinatorialFunction():
    pass

def test_sympy__functions__combinatorial__factorials__FallingFactorial():
    from sympy.functions.combinatorial.factorials import FallingFactorial
    assert _test_args(FallingFactorial(2, x))

def test_sympy__functions__combinatorial__factorials__MultiFactorial():
    from sympy.functions.combinatorial.factorials import MultiFactorial
    assert _test_args(MultiFactorial(x))

def test_sympy__functions__combinatorial__factorials__RisingFactorial():
    from sympy.functions.combinatorial.factorials import RisingFactorial
    assert _test_args(RisingFactorial(2, x))

def test_sympy__functions__combinatorial__factorials__binomial():
    from sympy.functions.combinatorial.factorials import binomial
    assert _test_args(binomial(2, x))

def test_sympy__functions__combinatorial__factorials__subfactorial():
    from sympy.functions.combinatorial.factorials import subfactorial
    assert _test_args(subfactorial(1))

def test_sympy__functions__combinatorial__factorials__factorial():
    from sympy.functions.combinatorial.factorials import factorial
    assert _test_args(factorial(x))

def test_sympy__functions__combinatorial__factorials__factorial2():
    from sympy.functions.combinatorial.factorials import factorial2
    assert _test_args(factorial2(x))

def test_sympy__functions__combinatorial__numbers__bell():
    from sympy.functions.combinatorial.numbers import bell
    assert _test_args(bell(x, y))

def test_sympy__functions__combinatorial__numbers__bernoulli():
    from sympy.functions.combinatorial.numbers import bernoulli
    assert _test_args(bernoulli(x))

def test_sympy__functions__combinatorial__numbers__catalan():
    from sympy.functions.combinatorial.numbers import catalan
    assert _test_args(catalan(x))

def test_sympy__functions__combinatorial__numbers__genocchi():
    from sympy.functions.combinatorial.numbers import genocchi
    assert _test_args(genocchi(x))

def test_sympy__functions__combinatorial__numbers__euler():
    from sympy.functions.combinatorial.numbers import euler
    assert _test_args(euler(x))

def test_sympy__functions__combinatorial__numbers__fibonacci():
    from sympy.functions.combinatorial.numbers import fibonacci
    assert _test_args(fibonacci(x))

def test_sympy__functions__combinatorial__numbers__harmonic():
    from sympy.functions.combinatorial.numbers import harmonic
    assert _test_args(harmonic(x, 2))

def test_sympy__functions__combinatorial__numbers__lucas():
    from sympy.functions.combinatorial.numbers import lucas
    assert _test_args(lucas(x))

def test_sympy__functions__elementary__complexes__Abs():
    from sympy.functions.elementary.complexes import Abs
    assert _test_args(Abs(x))

def test_sympy__functions__elementary__complexes__adjoint():
    from sympy.functions.elementary.complexes import adjoint
    assert _test_args(adjoint(x))

def test_sympy__functions__elementary__complexes__arg():
    from sympy.functions.elementary.complexes import arg
    assert _test_args(arg(x))

def test_sympy__functions__elementary__complexes__conjugate():
    from sympy.functions.elementary.complexes import conjugate
    assert _test_args(conjugate(x))

def test_sympy__functions__elementary__complexes__im():
    from sympy.functions.elementary.complexes import im
    assert _test_args(im(x))

def test_sympy__functions__elementary__complexes__re():
    from sympy.functions.elementary.complexes import re
    assert _test_args(re(x))

def test_sympy__functions__elementary__complexes__sign():
    from sympy.functions.elementary.complexes import sign
    assert _test_args(sign(x))

def test_sympy__functions__elementary__complexes__polar_lift():
    from sympy.functions.elementary.complexes import polar_lift
    assert _test_args(polar_lift(x))

def test_sympy__functions__elementary__complexes__periodic_argument():
    from sympy.functions.elementary.complexes import periodic_argument
    assert _test_args(periodic_argument(x, y))

def test_sympy__functions__elementary__complexes__principal_branch():
    from sympy.functions.elementary.complexes import principal_branch
    assert _test_args(principal_branch(x, y))

def test_sympy__functions__elementary__complexes__transpose():
    from sympy.functions.elementary.complexes import transpose
    assert _test_args(transpose(x))
def test_sympy__functions__elementary__exponential__LambertW():
    from sympy.functions.elementary.exponential import LambertW
    assert _test_args(LambertW(2))

@SKIP("abstract class")
def test_sympy__functions__elementary__exponential__ExpBase():
    pass

def test_sympy__functions__elementary__exponential__exp():
    from sympy.functions.elementary.exponential import exp
    assert _test_args(exp(2))

def test_sympy__functions__elementary__exponential__exp_polar():
    from sympy.functions.elementary.exponential import exp_polar
    assert _test_args(exp_polar(2))

def test_sympy__functions__elementary__exponential__log():
    from sympy.functions.elementary.exponential import log
    assert _test_args(log(2))

@SKIP("abstract class")
def test_sympy__functions__elementary__hyperbolic__HyperbolicFunction():
    pass

@SKIP("abstract class")
def test_sympy__functions__elementary__hyperbolic__ReciprocalHyperbolicFunction():
    pass

def test_sympy__functions__elementary__hyperbolic__acosh():
    from sympy.functions.elementary.hyperbolic import acosh
    assert _test_args(acosh(2))

def test_sympy__functions__elementary__hyperbolic__acoth():
    from sympy.functions.elementary.hyperbolic import acoth
    assert _test_args(acoth(2))

def test_sympy__functions__elementary__hyperbolic__asinh():
    from sympy.functions.elementary.hyperbolic import asinh
    assert _test_args(asinh(2))

def test_sympy__functions__elementary__hyperbolic__atanh():
    from sympy.functions.elementary.hyperbolic import atanh
    assert _test_args(atanh(2))

def test_sympy__functions__elementary__hyperbolic__cosh():
    from sympy.functions.elementary.hyperbolic import cosh
    assert _test_args(cosh(2))

def test_sympy__functions__elementary__hyperbolic__coth():
    from sympy.functions.elementary.hyperbolic import coth
    assert _test_args(coth(2))

def test_sympy__functions__elementary__hyperbolic__csch():
    from sympy.functions.elementary.hyperbolic import csch
    assert _test_args(csch(2))

def test_sympy__functions__elementary__hyperbolic__sech():
    from sympy.functions.elementary.hyperbolic import sech
    assert _test_args(sech(2))

def test_sympy__functions__elementary__hyperbolic__sinh():
    from sympy.functions.elementary.hyperbolic import sinh
    assert _test_args(sinh(2))

def test_sympy__functions__elementary__hyperbolic__tanh():
    from sympy.functions.elementary.hyperbolic import tanh
    assert _test_args(tanh(2))

@SKIP("does this work at all?")
def test_sympy__functions__elementary__integers__RoundFunction():
    from sympy.functions.elementary.integers import RoundFunction
    assert _test_args(RoundFunction())

def test_sympy__functions__elementary__integers__ceiling():
    from sympy.functions.elementary.integers import ceiling
    assert _test_args(ceiling(x))

def test_sympy__functions__elementary__integers__floor():
    from sympy.functions.elementary.integers import floor
    assert _test_args(floor(x))

def test_sympy__functions__elementary__miscellaneous__IdentityFunction():
    from sympy.functions.elementary.miscellaneous import IdentityFunction
    assert _test_args(IdentityFunction())

def test_sympy__functions__elementary__miscellaneous__Max():
    from sympy.functions.elementary.miscellaneous import Max
    assert _test_args(Max(x, 2))

def test_sympy__functions__elementary__miscellaneous__Min():
    from sympy.functions.elementary.miscellaneous import Min
    assert _test_args(Min(x, 2))

@SKIP("abstract class")
def test_sympy__functions__elementary__miscellaneous__MinMaxBase():
    pass

def test_sympy__functions__elementary__piecewise__ExprCondPair():
    from sympy.functions.elementary.piecewise import ExprCondPair
    assert _test_args(ExprCondPair(1, True))

def test_sympy__functions__elementary__piecewise__Piecewise():
    from sympy.functions.elementary.piecewise import Piecewise
    assert _test_args(Piecewise((1, x >= 0), (0, True)))

@SKIP("abstract class")
def test_sympy__functions__elementary__trigonometric__TrigonometricFunction():
    pass

@SKIP("abstract class")
def test_sympy__functions__elementary__trigonometric__ReciprocalTrigonometricFunction():
    pass

@SKIP("abstract class")
def test_sympy__functions__elementary__trigonometric__InverseTrigonometricFunction():
    pass

def test_sympy__functions__elementary__trigonometric__acos():
    from sympy.functions.elementary.trigonometric import acos
    assert _test_args(acos(2))

def test_sympy__functions__elementary__trigonometric__acot():
    from sympy.functions.elementary.trigonometric import acot
    assert _test_args(acot(2))

def test_sympy__functions__elementary__trigonometric__asin():
    from sympy.functions.elementary.trigonometric import asin
    assert _test_args(asin(2))

def test_sympy__functions__elementary__trigonometric__asec():
    from sympy.functions.elementary.trigonometric import asec
    assert _test_args(asec(2))

def test_sympy__functions__elementary__trigonometric__acsc():
    from sympy.functions.elementary.trigonometric import acsc
    assert _test_args(acsc(2))

def test_sympy__functions__elementary__trigonometric__atan():
    from sympy.functions.elementary.trigonometric import atan
    assert _test_args(atan(2))

def test_sympy__functions__elementary__trigonometric__atan2():
    from sympy.functions.elementary.trigonometric import atan2
    assert _test_args(atan2(2, 3))

def test_sympy__functions__elementary__trigonometric__cos():
    from sympy.functions.elementary.trigonometric import cos
    assert _test_args(cos(2))

def test_sympy__functions__elementary__trigonometric__csc():
    from sympy.functions.elementary.trigonometric import csc
    assert _test_args(csc(2))

def test_sympy__functions__elementary__trigonometric__cot():
    from sympy.functions.elementary.trigonometric import cot
    assert _test_args(cot(2))

def test_sympy__functions__elementary__trigonometric__sin():
    assert _test_args(sin(2))

def test_sympy__functions__elementary__trigonometric__sec():
    from sympy.functions.elementary.trigonometric import sec
    assert _test_args(sec(2))

def test_sympy__functions__elementary__trigonometric__tan():
    from sympy.functions.elementary.trigonometric import tan
    assert _test_args(tan(2))

@SKIP("abstract class")
def test_sympy__functions__special__bessel__BesselBase():
    pass

@SKIP("abstract class")
def test_sympy__functions__special__bessel__SphericalBesselBase():
    pass

def test_sympy__functions__special__bessel__besseli():
    from sympy.functions.special.bessel import besseli
    assert _test_args(besseli(x, 1))

def test_sympy__functions__special__bessel__besselj():
    from sympy.functions.special.bessel import besselj
    assert _test_args(besselj(x, 1))

def test_sympy__functions__special__bessel__besselk():
    from sympy.functions.special.bessel import besselk
    assert _test_args(besselk(x, 1))

def test_sympy__functions__special__bessel__bessely():
    from sympy.functions.special.bessel import bessely
    assert _test_args(bessely(x, 1))

def test_sympy__functions__special__bessel__hankel1():
    from sympy.functions.special.bessel import hankel1
    assert _test_args(hankel1(x, 1))

def test_sympy__functions__special__bessel__hankel2():
    from sympy.functions.special.bessel import hankel2
    assert _test_args(hankel2(x, 1))

def test_sympy__functions__special__bessel__jn():
    from sympy.functions.special.bessel import jn
    assert _test_args(jn(0, x))

def test_sympy__functions__special__bessel__yn():
    from sympy.functions.special.bessel import yn
    assert _test_args(yn(0, x))

@SKIP("abstract class")
def test_sympy__functions__special__bessel__AiryBase():
    pass

def test_sympy__functions__special__bessel__airyai():
    from sympy.functions.special.bessel import airyai
    assert _test_args(airyai(2))

def test_sympy__functions__special__bessel__airybi():
    from sympy.functions.special.bessel import airybi
    assert _test_args(airybi(2))

def test_sympy__functions__special__bessel__airyaiprime():
    from sympy.functions.special.bessel import airyaiprime
    assert _test_args(airyaiprime(2))

def test_sympy__functions__special__bessel__airybiprime():
    from sympy.functions.special.bessel import airybiprime
    assert _test_args(airybiprime(2))

def test_sympy__functions__special__elliptic_integrals__elliptic_k():
    from sympy.functions.special.elliptic_integrals import elliptic_k as K
    assert _test_args(K(x))

def test_sympy__functions__special__elliptic_integrals__elliptic_f():
    from sympy.functions.special.elliptic_integrals import elliptic_f as F
    assert _test_args(F(x, y))

def test_sympy__functions__special__elliptic_integrals__elliptic_e():
    from sympy.functions.special.elliptic_integrals import elliptic_e as E
    assert _test_args(E(x))
    assert _test_args(E(x, y))

def test_sympy__functions__special__elliptic_integrals__elliptic_pi():
    from sympy.functions.special.elliptic_integrals import elliptic_pi as P
    assert _test_args(P(x, y))
    assert _test_args(P(x, y, z))

def test_sympy__functions__special__delta_functions__DiracDelta():
    from sympy.functions.special.delta_functions import DiracDelta
    assert _test_args(DiracDelta(x, 1))

def test_sympy__functions__special__delta_functions__Heaviside():
    from sympy.functions.special.delta_functions import Heaviside
    assert _test_args(Heaviside(x))

def test_sympy__functions__special__error_functions__erf():
    from sympy.functions.special.error_functions import erf
    assert _test_args(erf(2))

def test_sympy__functions__special__error_functions__erfc():
    from sympy.functions.special.error_functions import erfc
    assert _test_args(erfc(2))

def test_sympy__functions__special__error_functions__erfi():
    from sympy.functions.special.error_functions import erfi
    assert _test_args(erfi(2))

def test_sympy__functions__special__error_functions__erf2():
    from sympy.functions.special.error_functions import erf2
    assert _test_args(erf2(2, 3))

def test_sympy__functions__special__error_functions__erfinv():
    from sympy.functions.special.error_functions import erfinv
    assert _test_args(erfinv(2))

def test_sympy__functions__special__error_functions__erfcinv():
    from sympy.functions.special.error_functions import erfcinv
    assert _test_args(erfcinv(2))

def test_sympy__functions__special__error_functions__erf2inv():
    from sympy.functions.special.error_functions import erf2inv
    assert _test_args(erf2inv(2, 3))

@SKIP("abstract class")
def test_sympy__functions__special__error_functions__FresnelIntegral():
    pass

def test_sympy__functions__special__error_functions__fresnels():
    from sympy.functions.special.error_functions import fresnels
    assert _test_args(fresnels(2))

def test_sympy__functions__special__error_functions__fresnelc():
    from sympy.functions.special.error_functions import fresnelc
    assert _test_args(fresnelc(2))

def test_sympy__functions__special__error_functions__erfs():
    from sympy.functions.special.error_functions import _erfs
    assert _test_args(_erfs(2))

def test_sympy__functions__special__error_functions__Ei():
    from sympy.functions.special.error_functions import Ei
    assert _test_args(Ei(2))

def test_sympy__functions__special__error_functions__li():
    from
sympy.functions.special.error_functions import li assert _test_args(li(2)) def test_sympy__functions__special__error_functions__Li(): from sympy.functions.special.error_functions import Li assert _test_args(Li(2)) @SKIP("abstract class") def test_sympy__functions__special__error_functions__TrigonometricIntegral(): pass def test_sympy__functions__special__error_functions__Si(): from sympy.functions.special.error_functions import Si assert _test_args(Si(2)) def test_sympy__functions__special__error_functions__Ci(): from sympy.functions.special.error_functions import Ci assert _test_args(Ci(2)) def test_sympy__functions__special__error_functions__Shi(): from sympy.functions.special.error_functions import Shi assert _test_args(Shi(2)) def test_sympy__functions__special__error_functions__Chi(): from sympy.functions.special.error_functions import Chi assert _test_args(Chi(2)) def test_sympy__functions__special__error_functions__expint(): from sympy.functions.special.error_functions import expint assert _test_args(expint(y, x)) def test_sympy__functions__special__gamma_functions__gamma(): from sympy.functions.special.gamma_functions import gamma assert _test_args(gamma(x)) def test_sympy__functions__special__gamma_functions__loggamma(): from sympy.functions.special.gamma_functions import loggamma assert _test_args(loggamma(2)) def test_sympy__functions__special__gamma_functions__lowergamma(): from sympy.functions.special.gamma_functions import lowergamma assert _test_args(lowergamma(x, 2)) def test_sympy__functions__special__gamma_functions__polygamma(): from sympy.functions.special.gamma_functions import polygamma assert _test_args(polygamma(x, 2)) def test_sympy__functions__special__gamma_functions__uppergamma(): from sympy.functions.special.gamma_functions import uppergamma assert _test_args(uppergamma(x, 2)) def test_sympy__functions__special__beta_functions__beta(): from sympy.functions.special.beta_functions import beta assert _test_args(beta(x, x)) @SKIP("abstract class") def test_sympy__functions__special__hyper__TupleParametersBase(): pass @SKIP("abstract class") def test_sympy__functions__special__hyper__TupleArg(): pass def test_sympy__functions__special__hyper__hyper(): from sympy.functions.special.hyper import hyper assert _test_args(hyper([1, 2, 3], [4, 5], x)) def test_sympy__functions__special__hyper__meijerg(): from sympy.functions.special.hyper import meijerg assert _test_args(meijerg([1, 2, 3], [4, 5], [6], [], x)) @SKIP("abstract class") def test_sympy__functions__special__hyper__HyperRep(): pass def test_sympy__functions__special__hyper__HyperRep_power1(): from sympy.functions.special.hyper import HyperRep_power1 assert _test_args(HyperRep_power1(x, y)) def test_sympy__functions__special__hyper__HyperRep_power2(): from sympy.functions.special.hyper import HyperRep_power2 assert _test_args(HyperRep_power2(x, y)) def test_sympy__functions__special__hyper__HyperRep_log1(): from sympy.functions.special.hyper import HyperRep_log1 assert _test_args(HyperRep_log1(x)) def test_sympy__functions__special__hyper__HyperRep_atanh(): from sympy.functions.special.hyper import HyperRep_atanh assert _test_args(HyperRep_atanh(x)) def test_sympy__functions__special__hyper__HyperRep_asin1(): from sympy.functions.special.hyper import HyperRep_asin1 assert _test_args(HyperRep_asin1(x)) def test_sympy__functions__special__hyper__HyperRep_asin2(): from sympy.functions.special.hyper import HyperRep_asin2 assert _test_args(HyperRep_asin2(x)) def 
test_sympy__functions__special__hyper__HyperRep_sqrts1(): from sympy.functions.special.hyper import HyperRep_sqrts1 assert _test_args(HyperRep_sqrts1(x, y)) def test_sympy__functions__special__hyper__HyperRep_sqrts2(): from sympy.functions.special.hyper import HyperRep_sqrts2 assert _test_args(HyperRep_sqrts2(x, y)) def test_sympy__functions__special__hyper__HyperRep_log2(): from sympy.functions.special.hyper import HyperRep_log2 assert _test_args(HyperRep_log2(x)) def test_sympy__functions__special__hyper__HyperRep_cosasin(): from sympy.functions.special.hyper import HyperRep_cosasin assert _test_args(HyperRep_cosasin(x, y)) def test_sympy__functions__special__hyper__HyperRep_sinasin(): from sympy.functions.special.hyper import HyperRep_sinasin assert _test_args(HyperRep_sinasin(x, y)) @SKIP("abstract class") def test_sympy__functions__special__polynomials__OrthogonalPolynomial(): pass def test_sympy__functions__special__polynomials__jacobi(): from sympy.functions.special.polynomials import jacobi assert _test_args(jacobi(x, 2, 2, 2)) def test_sympy__functions__special__polynomials__gegenbauer(): from sympy.functions.special.polynomials import gegenbauer assert _test_args(gegenbauer(x, 2, 2)) def test_sympy__functions__special__polynomials__chebyshevt(): from sympy.functions.special.polynomials import chebyshevt assert _test_args(chebyshevt(x, 2)) def test_sympy__functions__special__polynomials__chebyshevt_root(): from sympy.functions.special.polynomials import chebyshevt_root assert _test_args(chebyshevt_root(3, 2)) def test_sympy__functions__special__polynomials__chebyshevu(): from sympy.functions.special.polynomials import chebyshevu assert _test_args(chebyshevu(x, 2)) def test_sympy__functions__special__polynomials__chebyshevu_root(): from sympy.functions.special.polynomials import chebyshevu_root assert _test_args(chebyshevu_root(3, 2)) def test_sympy__functions__special__polynomials__hermite(): from sympy.functions.special.polynomials import hermite assert _test_args(hermite(x, 2)) def test_sympy__functions__special__polynomials__legendre(): from sympy.functions.special.polynomials import legendre assert _test_args(legendre(x, 2)) def test_sympy__functions__special__polynomials__assoc_legendre(): from sympy.functions.special.polynomials import assoc_legendre assert _test_args(assoc_legendre(x, 0, y)) def test_sympy__functions__special__polynomials__laguerre(): from sympy.functions.special.polynomials import laguerre assert _test_args(laguerre(x, 2)) def test_sympy__functions__special__polynomials__assoc_laguerre(): from sympy.functions.special.polynomials import assoc_laguerre assert _test_args(assoc_laguerre(x, 0, y)) def test_sympy__functions__special__spherical_harmonics__Ynm(): from sympy.functions.special.spherical_harmonics import Ynm assert _test_args(Ynm(1, 1, x, y)) def test_sympy__functions__special__spherical_harmonics__Znm(): from sympy.functions.special.spherical_harmonics import Znm assert _test_args(Znm(1, 1, x, y)) def test_sympy__functions__special__tensor_functions__LeviCivita(): from sympy.functions.special.tensor_functions import LeviCivita assert _test_args(LeviCivita(x, y, 2)) def test_sympy__functions__special__tensor_functions__KroneckerDelta(): from sympy.functions.special.tensor_functions import KroneckerDelta assert _test_args(KroneckerDelta(x, y)) def test_sympy__functions__special__zeta_functions__dirichlet_eta(): from sympy.functions.special.zeta_functions import dirichlet_eta assert _test_args(dirichlet_eta(x)) def 
test_sympy__functions__special__zeta_functions__zeta(): from sympy.functions.special.zeta_functions import zeta assert _test_args(zeta(101)) def test_sympy__functions__special__zeta_functions__lerchphi(): from sympy.functions.special.zeta_functions import lerchphi assert _test_args(lerchphi(x, y, z)) def test_sympy__functions__special__zeta_functions__polylog(): from sympy.functions.special.zeta_functions import polylog assert _test_args(polylog(x, y)) def test_sympy__integrals__integrals__Integral(): from sympy.integrals.integrals import Integral assert _test_args(Integral(2, (x, 0, 1))) def test_sympy__integrals__risch__NonElementaryIntegral(): from sympy.integrals.risch import NonElementaryIntegral assert _test_args(NonElementaryIntegral(exp(-x**2), x)) @SKIP("abstract class") def test_sympy__integrals__transforms__IntegralTransform(): pass def test_sympy__integrals__transforms__MellinTransform(): from sympy.integrals.transforms import MellinTransform assert _test_args(MellinTransform(2, x, y)) def test_sympy__integrals__transforms__InverseMellinTransform(): from sympy.integrals.transforms import InverseMellinTransform assert _test_args(InverseMellinTransform(2, x, y, 0, 1)) def test_sympy__integrals__transforms__LaplaceTransform(): from sympy.integrals.transforms import LaplaceTransform assert _test_args(LaplaceTransform(2, x, y)) def test_sympy__integrals__transforms__InverseLaplaceTransform(): from sympy.integrals.transforms import InverseLaplaceTransform assert _test_args(InverseLaplaceTransform(2, x, y, 0)) @SKIP("abstract class") def test_sympy__integrals__transforms__FourierTypeTransform(): pass def test_sympy__integrals__transforms__InverseFourierTransform(): from sympy.integrals.transforms import InverseFourierTransform assert _test_args(InverseFourierTransform(2, x, y)) def test_sympy__integrals__transforms__FourierTransform(): from sympy.integrals.transforms import FourierTransform assert _test_args(FourierTransform(2, x, y)) @SKIP("abstract class") def test_sympy__integrals__transforms__SineCosineTypeTransform(): pass def test_sympy__integrals__transforms__InverseSineTransform(): from sympy.integrals.transforms import InverseSineTransform assert _test_args(InverseSineTransform(2, x, y)) def test_sympy__integrals__transforms__SineTransform(): from sympy.integrals.transforms import SineTransform assert _test_args(SineTransform(2, x, y)) def test_sympy__integrals__transforms__InverseCosineTransform(): from sympy.integrals.transforms import InverseCosineTransform assert _test_args(InverseCosineTransform(2, x, y)) def test_sympy__integrals__transforms__CosineTransform(): from sympy.integrals.transforms import CosineTransform assert _test_args(CosineTransform(2, x, y)) @SKIP("abstract class") def test_sympy__integrals__transforms__HankelTypeTransform(): pass def test_sympy__integrals__transforms__InverseHankelTransform(): from sympy.integrals.transforms import InverseHankelTransform assert _test_args(InverseHankelTransform(2, x, y, 0)) def test_sympy__integrals__transforms__HankelTransform(): from sympy.integrals.transforms import HankelTransform assert _test_args(HankelTransform(2, x, y, 0)) @XFAIL def test_sympy__liealgebras__cartan_type__CartanType_generator(): from sympy.liealgebras.cartan_type import CartanType_generator assert _test_args(CartanType_generator("A2")) @XFAIL def test_sympy__liealgebras__cartan_type__Standard_Cartan(): from sympy.liealgebras.cartan_type import Standard_Cartan assert _test_args(Standard_Cartan("A", 2)) @XFAIL def 
test_sympy__liealgebras__weyl_group__WeylGroup(): from sympy.liealgebras.weyl_group import WeylGroup assert _test_args(WeylGroup("B4")) @XFAIL def test_sympy__liealgebras__root_system__RootSystem(): from sympy.liealgebras.root_system import RootSystem assert _test_args(RootSystem("A2")) @XFAIL def test_sympy__liealgebras__type_a__TypeA(): from sympy.liealgebras.type_a import TypeA assert _test_args(TypeA(2)) @XFAIL def test_sympy__liealgebras__type_b__TypeB(): from sympy.liealgebras.type_b import TypeB assert _test_args(TypeB(4)) @XFAIL def test_sympy__liealgebras__type_c__TypeC(): from sympy.liealgebras.type_c import TypeC assert _test_args(TypeC(4)) @XFAIL def test_sympy__liealgebras__type_d__TypeD(): from sympy.liealgebras.type_d import TypeD assert _test_args(TypeD(4)) @XFAIL def test_sympy__liealgebras__type_e__TypeE(): from sympy.liealgebras.type_e import TypeE assert _test_args(TypeE(6)) @XFAIL def test_sympy__liealgebras__type_f__TypeF(): from sympy.liealgebras.type_f import TypeF assert _test_args(TypeF(4)) @XFAIL def test_sympy__liealgebras__type_g__TypeG(): from sympy.liealgebras.type_g import TypeG assert _test_args(TypeG(2)) def test_sympy__logic__boolalg__And(): from sympy.logic.boolalg import And assert _test_args(And(x, y, 2)) @SKIP("abstract class") def test_sympy__logic__boolalg__Boolean(): pass def test_sympy__logic__boolalg__BooleanFunction(): from sympy.logic.boolalg import BooleanFunction assert _test_args(BooleanFunction(1, 2, 3)) @SKIP("abstract class") def test_sympy__logic__boolalg__BooleanAtom(): pass def test_sympy__logic__boolalg__BooleanTrue(): from sympy.logic.boolalg import true assert _test_args(true) def test_sympy__logic__boolalg__BooleanFalse(): from sympy.logic.boolalg import false assert _test_args(false) def test_sympy__logic__boolalg__Equivalent(): from sympy.logic.boolalg import Equivalent assert _test_args(Equivalent(x, 2)) def test_sympy__logic__boolalg__ITE(): from sympy.logic.boolalg import ITE assert _test_args(ITE(x, y, 2)) def test_sympy__logic__boolalg__Implies(): from sympy.logic.boolalg import Implies assert _test_args(Implies(x, y)) def test_sympy__logic__boolalg__Nand(): from sympy.logic.boolalg import Nand assert _test_args(Nand(x, y, 2)) def test_sympy__logic__boolalg__Nor(): from sympy.logic.boolalg import Nor assert _test_args(Nor(x, y)) def test_sympy__logic__boolalg__Not(): from sympy.logic.boolalg import Not assert _test_args(Not(x)) def test_sympy__logic__boolalg__Or(): from sympy.logic.boolalg import Or assert _test_args(Or(x, y)) def test_sympy__logic__boolalg__Xor(): from sympy.logic.boolalg import Xor assert _test_args(Xor(x, y, 2)) def test_sympy__matrices__matrices__DeferredVector(): from sympy.matrices.matrices import DeferredVector assert _test_args(DeferredVector("X")) @SKIP("abstract class") def test_sympy__matrices__expressions__matexpr__MatrixBase(): pass def test_sympy__matrices__immutable__ImmutableMatrix(): from sympy.matrices.immutable import ImmutableMatrix m = ImmutableMatrix([[1, 2], [3, 4]]) assert _test_args(m) assert _test_args(Basic(*list(m))) m = ImmutableMatrix(1, 1, [1]) assert _test_args(m) assert _test_args(Basic(*list(m))) m = ImmutableMatrix(2, 2, lambda i, j: 1) assert m[0, 0] is S.One m = ImmutableMatrix(2, 2, lambda i, j: 1/(1 + i) + 1/(1 + j)) assert m[1, 1] is S.One # true div. 
will give 1.0 if i,j not sympified assert _test_args(m) assert _test_args(Basic(*list(m))) def test_sympy__matrices__immutable__ImmutableSparseMatrix(): from sympy.matrices.immutable import ImmutableSparseMatrix m = ImmutableSparseMatrix([[1, 2], [3, 4]]) assert _test_args(m) assert _test_args(Basic(*list(m))) m = ImmutableSparseMatrix(1, 1, {(0, 0): 1}) assert _test_args(m) assert _test_args(Basic(*list(m))) m = ImmutableSparseMatrix(1, 1, [1]) assert _test_args(m) assert _test_args(Basic(*list(m))) m = ImmutableSparseMatrix(2, 2, lambda i, j: 1) assert m[0, 0] is S.One m = ImmutableSparseMatrix(2, 2, lambda i, j: 1/(1 + i) + 1/(1 + j)) assert m[1, 1] is S.One # true div. will give 1.0 if i,j not sympified assert _test_args(m) assert _test_args(Basic(*list(m))) def test_sympy__matrices__expressions__slice__MatrixSlice(): from sympy.matrices.expressions.slice import MatrixSlice from sympy.matrices.expressions import MatrixSymbol X = MatrixSymbol('X', 4, 4) assert _test_args(MatrixSlice(X, (0, 2), (0, 2))) def test_sympy__matrices__expressions__blockmatrix__BlockDiagMatrix(): from sympy.matrices.expressions.blockmatrix import BlockDiagMatrix from sympy.matrices.expressions import MatrixSymbol X = MatrixSymbol('X', x, x) Y = MatrixSymbol('Y', y, y) assert _test_args(BlockDiagMatrix(X, Y)) def test_sympy__matrices__expressions__blockmatrix__BlockMatrix(): from sympy.matrices.expressions.blockmatrix import BlockMatrix from sympy.matrices.expressions import MatrixSymbol, ZeroMatrix X = MatrixSymbol('X', x, x) Y = MatrixSymbol('Y', y, y) Z = MatrixSymbol('Z', x, y) O = ZeroMatrix(y, x) assert _test_args(BlockMatrix([[X, Z], [O, Y]])) def test_sympy__matrices__expressions__inverse__Inverse(): from sympy.matrices.expressions.inverse import Inverse from sympy.matrices.expressions import MatrixSymbol assert _test_args(Inverse(MatrixSymbol('A', 3, 3))) def test_sympy__matrices__expressions__matadd__MatAdd(): from sympy.matrices.expressions.matadd import MatAdd from sympy.matrices.expressions import MatrixSymbol X = MatrixSymbol('X', x, y) Y = MatrixSymbol('Y', x, y) assert _test_args(MatAdd(X, Y)) def test_sympy__matrices__expressions__matexpr__Identity(): from sympy.matrices.expressions.matexpr import Identity assert _test_args(Identity(3)) @SKIP("abstract class") def test_sympy__matrices__expressions__matexpr__MatrixExpr(): pass def test_sympy__matrices__expressions__matexpr__MatrixElement(): from sympy.matrices.expressions.matexpr import MatrixSymbol, MatrixElement from sympy import S assert _test_args(MatrixElement(MatrixSymbol('A', 3, 5), S(2), S(3))) @XFAIL def test_sympy__matrices__expressions__matexpr__MatrixSymbol(): from sympy.matrices.expressions.matexpr import MatrixSymbol assert _test_args(MatrixSymbol('A', 3, 5)) def test_sympy__matrices__expressions__matexpr__ZeroMatrix(): from sympy.matrices.expressions.matexpr import ZeroMatrix assert _test_args(ZeroMatrix(3, 5)) def test_sympy__matrices__expressions__matmul__MatMul(): from sympy.matrices.expressions.matmul import MatMul from sympy.matrices.expressions import MatrixSymbol X = MatrixSymbol('X', x, y) Y = MatrixSymbol('Y', y, x) assert _test_args(MatMul(X, Y)) def test_sympy__matrices__expressions__diagonal__DiagonalMatrix(): from sympy.matrices.expressions.diagonal import DiagonalMatrix from sympy.matrices.expressions import MatrixSymbol x = MatrixSymbol('x', 10, 1) assert _test_args(DiagonalMatrix(x)) def test_sympy__matrices__expressions__diagonal__DiagonalOf(): from sympy.matrices.expressions.diagonal import DiagonalOf from 
sympy.matrices.expressions import MatrixSymbol X = MatrixSymbol('x', 10, 10) assert _test_args(DiagonalOf(X)) def test_sympy__matrices__expressions__hadamard__HadamardProduct(): from sympy.matrices.expressions.hadamard import HadamardProduct from sympy.matrices.expressions import MatrixSymbol X = MatrixSymbol('X', x, y) Y = MatrixSymbol('Y', x, y) assert _test_args(HadamardProduct(X, Y)) def test_sympy__matrices__expressions__matpow__MatPow(): from sympy.matrices.expressions.matpow import MatPow from sympy.matrices.expressions import MatrixSymbol X = MatrixSymbol('X', x, x) assert _test_args(MatPow(X, 2)) def test_sympy__matrices__expressions__transpose__Transpose(): from sympy.matrices.expressions.transpose import Transpose from sympy.matrices.expressions import MatrixSymbol assert _test_args(Transpose(MatrixSymbol('A', 3, 5))) def test_sympy__matrices__expressions__adjoint__Adjoint(): from sympy.matrices.expressions.adjoint import Adjoint from sympy.matrices.expressions import MatrixSymbol assert _test_args(Adjoint(MatrixSymbol('A', 3, 5))) def test_sympy__matrices__expressions__trace__Trace(): from sympy.matrices.expressions.trace import Trace from sympy.matrices.expressions import MatrixSymbol assert _test_args(Trace(MatrixSymbol('A', 3, 3))) def test_sympy__matrices__expressions__determinant__Determinant(): from sympy.matrices.expressions.determinant import Determinant from sympy.matrices.expressions import MatrixSymbol assert _test_args(Determinant(MatrixSymbol('A', 3, 3))) def test_sympy__matrices__expressions__funcmatrix__FunctionMatrix(): from sympy.matrices.expressions.funcmatrix import FunctionMatrix from sympy import Lambda, symbols i, j = symbols('i,j') assert _test_args(FunctionMatrix(3, 3, Lambda((i, j), i - j) )) def test_sympy__matrices__expressions__fourier__DFT(): from sympy.matrices.expressions.fourier import DFT from sympy import S assert _test_args(DFT(S(2))) def test_sympy__matrices__expressions__fourier__IDFT(): from sympy.matrices.expressions.fourier import IDFT from sympy import S assert _test_args(IDFT(S(2))) from sympy.matrices.expressions import MatrixSymbol X = MatrixSymbol('X', 10, 10) def test_sympy__matrices__expressions__factorizations__LofLU(): from sympy.matrices.expressions.factorizations import LofLU assert _test_args(LofLU(X)) def test_sympy__matrices__expressions__factorizations__UofLU(): from sympy.matrices.expressions.factorizations import UofLU assert _test_args(UofLU(X)) def test_sympy__matrices__expressions__factorizations__QofQR(): from sympy.matrices.expressions.factorizations import QofQR assert _test_args(QofQR(X)) def test_sympy__matrices__expressions__factorizations__RofQR(): from sympy.matrices.expressions.factorizations import RofQR assert _test_args(RofQR(X)) def test_sympy__matrices__expressions__factorizations__LofCholesky(): from sympy.matrices.expressions.factorizations import LofCholesky assert _test_args(LofCholesky(X)) def test_sympy__matrices__expressions__factorizations__UofCholesky(): from sympy.matrices.expressions.factorizations import UofCholesky assert _test_args(UofCholesky(X)) def test_sympy__matrices__expressions__factorizations__EigenVectors(): from sympy.matrices.expressions.factorizations import EigenVectors assert _test_args(EigenVectors(X)) def test_sympy__matrices__expressions__factorizations__EigenValues(): from sympy.matrices.expressions.factorizations import EigenValues assert _test_args(EigenValues(X)) def test_sympy__matrices__expressions__factorizations__UofSVD(): from 
sympy.matrices.expressions.factorizations import UofSVD assert _test_args(UofSVD(X)) def test_sympy__matrices__expressions__factorizations__VofSVD(): from sympy.matrices.expressions.factorizations import VofSVD assert _test_args(VofSVD(X)) def test_sympy__matrices__expressions__factorizations__SofSVD(): from sympy.matrices.expressions.factorizations import SofSVD assert _test_args(SofSVD(X)) @SKIP("abstract class") def test_sympy__matrices__expressions__factorizations__Factorization(): pass def test_sympy__physics__vector__frame__CoordinateSym(): from sympy.physics.vector import CoordinateSym from sympy.physics.vector import ReferenceFrame assert _test_args(CoordinateSym('R_x', ReferenceFrame('R'), 0)) def test_sympy__physics__paulialgebra__Pauli(): from sympy.physics.paulialgebra import Pauli assert _test_args(Pauli(1)) def test_sympy__physics__quantum__anticommutator__AntiCommutator(): from sympy.physics.quantum.anticommutator import AntiCommutator assert _test_args(AntiCommutator(x, y)) def test_sympy__physics__quantum__cartesian__PositionBra3D(): from sympy.physics.quantum.cartesian import PositionBra3D assert _test_args(PositionBra3D(x, y, z)) def test_sympy__physics__quantum__cartesian__PositionKet3D(): from sympy.physics.quantum.cartesian import PositionKet3D assert _test_args(PositionKet3D(x, y, z)) def test_sympy__physics__quantum__cartesian__PositionState3D(): from sympy.physics.quantum.cartesian import PositionState3D assert _test_args(PositionState3D(x, y, z)) def test_sympy__physics__quantum__cartesian__PxBra(): from sympy.physics.quantum.cartesian import PxBra assert _test_args(PxBra(x, y, z)) def test_sympy__physics__quantum__cartesian__PxKet(): from sympy.physics.quantum.cartesian import PxKet assert _test_args(PxKet(x, y, z)) def test_sympy__physics__quantum__cartesian__PxOp(): from sympy.physics.quantum.cartesian import PxOp assert _test_args(PxOp(x, y, z)) def test_sympy__physics__quantum__cartesian__XBra(): from sympy.physics.quantum.cartesian import XBra assert _test_args(XBra(x)) def test_sympy__physics__quantum__cartesian__XKet(): from sympy.physics.quantum.cartesian import XKet assert _test_args(XKet(x)) def test_sympy__physics__quantum__cartesian__XOp(): from sympy.physics.quantum.cartesian import XOp assert _test_args(XOp(x)) def test_sympy__physics__quantum__cartesian__YOp(): from sympy.physics.quantum.cartesian import YOp assert _test_args(YOp(x)) def test_sympy__physics__quantum__cartesian__ZOp(): from sympy.physics.quantum.cartesian import ZOp assert _test_args(ZOp(x)) def test_sympy__physics__quantum__cg__CG(): from sympy.physics.quantum.cg import CG from sympy import S assert _test_args(CG(S(3)/2, S(3)/2, S(1)/2, -S(1)/2, 1, 1)) def test_sympy__physics__quantum__cg__Wigner3j(): from sympy.physics.quantum.cg import Wigner3j assert _test_args(Wigner3j(6, 0, 4, 0, 2, 0)) def test_sympy__physics__quantum__cg__Wigner6j(): from sympy.physics.quantum.cg import Wigner6j assert _test_args(Wigner6j(1, 2, 3, 2, 1, 2)) def test_sympy__physics__quantum__cg__Wigner9j(): from sympy.physics.quantum.cg import Wigner9j assert _test_args(Wigner9j(2, 1, 1, S(3)/2, S(1)/2, 1, S(1)/2, S(1)/2, 0)) def test_sympy__physics__quantum__circuitplot__Mz(): from sympy.physics.quantum.circuitplot import Mz assert _test_args(Mz(0)) def test_sympy__physics__quantum__circuitplot__Mx(): from sympy.physics.quantum.circuitplot import Mx assert _test_args(Mx(0)) def test_sympy__physics__quantum__commutator__Commutator(): from sympy.physics.quantum.commutator import Commutator A, B = 
symbols('A,B', commutative=False) assert _test_args(Commutator(A, B)) def test_sympy__physics__quantum__constants__HBar(): from sympy.physics.quantum.constants import HBar assert _test_args(HBar()) def test_sympy__physics__quantum__dagger__Dagger(): from sympy.physics.quantum.dagger import Dagger from sympy.physics.quantum.state import Ket assert _test_args(Dagger(Dagger(Ket('psi')))) def test_sympy__physics__quantum__gate__CGate(): from sympy.physics.quantum.gate import CGate, Gate assert _test_args(CGate((0, 1), Gate(2))) def test_sympy__physics__quantum__gate__CGateS(): from sympy.physics.quantum.gate import CGateS, Gate assert _test_args(CGateS((0, 1), Gate(2))) def test_sympy__physics__quantum__gate__CNotGate(): from sympy.physics.quantum.gate import CNotGate assert _test_args(CNotGate(0, 1)) def test_sympy__physics__quantum__gate__Gate(): from sympy.physics.quantum.gate import Gate assert _test_args(Gate(0)) def test_sympy__physics__quantum__gate__HadamardGate(): from sympy.physics.quantum.gate import HadamardGate assert _test_args(HadamardGate(0)) def test_sympy__physics__quantum__gate__IdentityGate(): from sympy.physics.quantum.gate import IdentityGate assert _test_args(IdentityGate(0)) def test_sympy__physics__quantum__gate__OneQubitGate(): from sympy.physics.quantum.gate import OneQubitGate assert _test_args(OneQubitGate(0)) def test_sympy__physics__quantum__gate__PhaseGate(): from sympy.physics.quantum.gate import PhaseGate assert _test_args(PhaseGate(0)) def test_sympy__physics__quantum__gate__SwapGate(): from sympy.physics.quantum.gate import SwapGate assert _test_args(SwapGate(0, 1)) def test_sympy__physics__quantum__gate__TGate(): from sympy.physics.quantum.gate import TGate assert _test_args(TGate(0)) def test_sympy__physics__quantum__gate__TwoQubitGate(): from sympy.physics.quantum.gate import TwoQubitGate assert _test_args(TwoQubitGate(0)) def test_sympy__physics__quantum__gate__UGate(): from sympy.physics.quantum.gate import UGate from sympy.matrices.immutable import ImmutableMatrix from sympy import Integer, Tuple assert _test_args( UGate(Tuple(Integer(1)), ImmutableMatrix([[1, 0], [0, 2]]))) def test_sympy__physics__quantum__gate__XGate(): from sympy.physics.quantum.gate import XGate assert _test_args(XGate(0)) def test_sympy__physics__quantum__gate__YGate(): from sympy.physics.quantum.gate import YGate assert _test_args(YGate(0)) def test_sympy__physics__quantum__gate__ZGate(): from sympy.physics.quantum.gate import ZGate assert _test_args(ZGate(0)) @SKIP("TODO: sympy.physics") def test_sympy__physics__quantum__grover__OracleGate(): from sympy.physics.quantum.grover import OracleGate assert _test_args(OracleGate()) def test_sympy__physics__quantum__grover__WGate(): from sympy.physics.quantum.grover import WGate assert _test_args(WGate(1)) def test_sympy__physics__quantum__hilbert__ComplexSpace(): from sympy.physics.quantum.hilbert import ComplexSpace assert _test_args(ComplexSpace(x)) def test_sympy__physics__quantum__hilbert__DirectSumHilbertSpace(): from sympy.physics.quantum.hilbert import DirectSumHilbertSpace, ComplexSpace, FockSpace c = ComplexSpace(2) f = FockSpace() assert _test_args(DirectSumHilbertSpace(c, f)) def test_sympy__physics__quantum__hilbert__FockSpace(): from sympy.physics.quantum.hilbert import FockSpace assert _test_args(FockSpace()) def test_sympy__physics__quantum__hilbert__HilbertSpace(): from sympy.physics.quantum.hilbert import HilbertSpace assert _test_args(HilbertSpace()) def test_sympy__physics__quantum__hilbert__L2(): from 
sympy.physics.quantum.hilbert import L2 from sympy import oo, Interval assert _test_args(L2(Interval(0, oo))) def test_sympy__physics__quantum__hilbert__TensorPowerHilbertSpace(): from sympy.physics.quantum.hilbert import TensorPowerHilbertSpace, FockSpace f = FockSpace() assert _test_args(TensorPowerHilbertSpace(f, 2)) def test_sympy__physics__quantum__hilbert__TensorProductHilbertSpace(): from sympy.physics.quantum.hilbert import TensorProductHilbertSpace, FockSpace, ComplexSpace c = ComplexSpace(2) f = FockSpace() assert _test_args(TensorProductHilbertSpace(f, c)) def test_sympy__physics__quantum__innerproduct__InnerProduct(): from sympy.physics.quantum import Bra, Ket, InnerProduct b = Bra('b') k = Ket('k') assert _test_args(InnerProduct(b, k)) def test_sympy__physics__quantum__operator__DifferentialOperator(): from sympy.physics.quantum.operator import DifferentialOperator from sympy import Derivative, Function f = Function('f') assert _test_args(DifferentialOperator(1/x*Derivative(f(x), x), f(x))) def test_sympy__physics__quantum__operator__HermitianOperator(): from sympy.physics.quantum.operator import HermitianOperator assert _test_args(HermitianOperator('H')) def test_sympy__physics__quantum__operator__IdentityOperator(): from sympy.physics.quantum.operator import IdentityOperator assert _test_args(IdentityOperator(5)) def test_sympy__physics__quantum__operator__Operator(): from sympy.physics.quantum.operator import Operator assert _test_args(Operator('A')) def test_sympy__physics__quantum__operator__OuterProduct(): from sympy.physics.quantum.operator import OuterProduct from sympy.physics.quantum import Ket, Bra b = Bra('b') k = Ket('k') assert _test_args(OuterProduct(k, b)) def test_sympy__physics__quantum__operator__UnitaryOperator(): from sympy.physics.quantum.operator import UnitaryOperator assert _test_args(UnitaryOperator('U')) def test_sympy__physics__quantum__piab__PIABBra(): from sympy.physics.quantum.piab import PIABBra assert _test_args(PIABBra('B')) def test_sympy__physics__quantum__boson__BosonOp(): from sympy.physics.quantum.boson import BosonOp assert _test_args(BosonOp('a')) assert _test_args(BosonOp('a', False)) def test_sympy__physics__quantum__boson__BosonFockKet(): from sympy.physics.quantum.boson import BosonFockKet assert _test_args(BosonFockKet(1)) def test_sympy__physics__quantum__boson__BosonFockBra(): from sympy.physics.quantum.boson import BosonFockBra assert _test_args(BosonFockBra(1)) def test_sympy__physics__quantum__boson__BosonCoherentKet(): from sympy.physics.quantum.boson import BosonCoherentKet assert _test_args(BosonCoherentKet(1)) def test_sympy__physics__quantum__boson__BosonCoherentBra(): from sympy.physics.quantum.boson import BosonCoherentBra assert _test_args(BosonCoherentBra(1)) def test_sympy__physics__quantum__fermion__FermionOp(): from sympy.physics.quantum.fermion import FermionOp assert _test_args(FermionOp('c')) assert _test_args(FermionOp('c', False)) def test_sympy__physics__quantum__fermion__FermionFockKet(): from sympy.physics.quantum.fermion import FermionFockKet assert _test_args(FermionFockKet(1)) def test_sympy__physics__quantum__fermion__FermionFockBra(): from sympy.physics.quantum.fermion import FermionFockBra assert _test_args(FermionFockBra(1)) def test_sympy__physics__quantum__pauli__SigmaOpBase(): from sympy.physics.quantum.pauli import SigmaOpBase assert _test_args(SigmaOpBase()) def test_sympy__physics__quantum__pauli__SigmaX(): from sympy.physics.quantum.pauli import SigmaX assert _test_args(SigmaX()) def 
test_sympy__physics__quantum__pauli__SigmaY(): from sympy.physics.quantum.pauli import SigmaY assert _test_args(SigmaY()) def test_sympy__physics__quantum__pauli__SigmaZ(): from sympy.physics.quantum.pauli import SigmaZ assert _test_args(SigmaZ()) def test_sympy__physics__quantum__pauli__SigmaMinus(): from sympy.physics.quantum.pauli import SigmaMinus assert _test_args(SigmaMinus()) def test_sympy__physics__quantum__pauli__SigmaPlus(): from sympy.physics.quantum.pauli import SigmaPlus assert _test_args(SigmaPlus()) def test_sympy__physics__quantum__pauli__SigmaZKet(): from sympy.physics.quantum.pauli import SigmaZKet assert _test_args(SigmaZKet(0)) def test_sympy__physics__quantum__pauli__SigmaZBra(): from sympy.physics.quantum.pauli import SigmaZBra assert _test_args(SigmaZBra(0)) def test_sympy__physics__quantum__piab__PIABHamiltonian(): from sympy.physics.quantum.piab import PIABHamiltonian assert _test_args(PIABHamiltonian('P')) def test_sympy__physics__quantum__piab__PIABKet(): from sympy.physics.quantum.piab import PIABKet assert _test_args(PIABKet('K')) def test_sympy__physics__quantum__qexpr__QExpr(): from sympy.physics.quantum.qexpr import QExpr assert _test_args(QExpr(0)) def test_sympy__physics__quantum__qft__Fourier(): from sympy.physics.quantum.qft import Fourier assert _test_args(Fourier(0, 1)) def test_sympy__physics__quantum__qft__IQFT(): from sympy.physics.quantum.qft import IQFT assert _test_args(IQFT(0, 1)) def test_sympy__physics__quantum__qft__QFT(): from sympy.physics.quantum.qft import QFT assert _test_args(QFT(0, 1)) def test_sympy__physics__quantum__qft__RkGate(): from sympy.physics.quantum.qft import RkGate assert _test_args(RkGate(0, 1)) def test_sympy__physics__quantum__qubit__IntQubit(): from sympy.physics.quantum.qubit import IntQubit assert _test_args(IntQubit(0)) def test_sympy__physics__quantum__qubit__IntQubitBra(): from sympy.physics.quantum.qubit import IntQubitBra assert _test_args(IntQubitBra(0)) def test_sympy__physics__quantum__qubit__IntQubitState(): from sympy.physics.quantum.qubit import IntQubitState, QubitState assert _test_args(IntQubitState(QubitState(0, 1))) def test_sympy__physics__quantum__qubit__Qubit(): from sympy.physics.quantum.qubit import Qubit assert _test_args(Qubit(0, 0, 0)) def test_sympy__physics__quantum__qubit__QubitBra(): from sympy.physics.quantum.qubit import QubitBra assert _test_args(QubitBra('1', 0)) def test_sympy__physics__quantum__qubit__QubitState(): from sympy.physics.quantum.qubit import QubitState assert _test_args(QubitState(0, 1)) def test_sympy__physics__quantum__density__Density(): from sympy.physics.quantum.density import Density from sympy.physics.quantum.state import Ket assert _test_args(Density([Ket(0), 0.5], [Ket(1), 0.5])) @SKIP("TODO: sympy.physics.quantum.shor: Cmod Not Implemented") def test_sympy__physics__quantum__shor__CMod(): from sympy.physics.quantum.shor import CMod assert _test_args(CMod()) def test_sympy__physics__quantum__spin__CoupledSpinState(): from sympy.physics.quantum.spin import CoupledSpinState assert _test_args(CoupledSpinState(1, 0, (1, 1))) assert _test_args(CoupledSpinState(1, 0, (1, S(1)/2, S(1)/2))) assert _test_args(CoupledSpinState( 1, 0, (1, S(1)/2, S(1)/2), ((2, 3, S(1)/2), (1, 2, 1)) )) j, m, j1, j2, j3, j12, x = symbols('j m j1:4 j12 x') assert CoupledSpinState( j, m, (j1, j2, j3)).subs(j2, x) == CoupledSpinState(j, m, (j1, x, j3)) assert CoupledSpinState(j, m, (j1, j2, j3), ((1, 3, j12), (1, 2, j)) ).subs(j12, x) == \ CoupledSpinState(j, m, (j1, j2, j3), ((1, 3, x), (1, 
2, j)) ) def test_sympy__physics__quantum__spin__J2Op(): from sympy.physics.quantum.spin import J2Op assert _test_args(J2Op('J')) def test_sympy__physics__quantum__spin__JminusOp(): from sympy.physics.quantum.spin import JminusOp assert _test_args(JminusOp('J')) def test_sympy__physics__quantum__spin__JplusOp(): from sympy.physics.quantum.spin import JplusOp assert _test_args(JplusOp('J')) def test_sympy__physics__quantum__spin__JxBra(): from sympy.physics.quantum.spin import JxBra assert _test_args(JxBra(1, 0)) def test_sympy__physics__quantum__spin__JxBraCoupled(): from sympy.physics.quantum.spin import JxBraCoupled assert _test_args(JxBraCoupled(1, 0, (1, 1))) def test_sympy__physics__quantum__spin__JxKet(): from sympy.physics.quantum.spin import JxKet assert _test_args(JxKet(1, 0)) def test_sympy__physics__quantum__spin__JxKetCoupled(): from sympy.physics.quantum.spin import JxKetCoupled assert _test_args(JxKetCoupled(1, 0, (1, 1))) def test_sympy__physics__quantum__spin__JxOp(): from sympy.physics.quantum.spin import JxOp assert _test_args(JxOp('J')) def test_sympy__physics__quantum__spin__JyBra(): from sympy.physics.quantum.spin import JyBra assert _test_args(JyBra(1, 0)) def test_sympy__physics__quantum__spin__JyBraCoupled(): from sympy.physics.quantum.spin import JyBraCoupled assert _test_args(JyBraCoupled(1, 0, (1, 1))) def test_sympy__physics__quantum__spin__JyKet(): from sympy.physics.quantum.spin import JyKet assert _test_args(JyKet(1, 0)) def test_sympy__physics__quantum__spin__JyKetCoupled(): from sympy.physics.quantum.spin import JyKetCoupled assert _test_args(JyKetCoupled(1, 0, (1, 1))) def test_sympy__physics__quantum__spin__JyOp(): from sympy.physics.quantum.spin import JyOp assert _test_args(JyOp('J')) def test_sympy__physics__quantum__spin__JzBra(): from sympy.physics.quantum.spin import JzBra assert _test_args(JzBra(1, 0)) def test_sympy__physics__quantum__spin__JzBraCoupled(): from sympy.physics.quantum.spin import JzBraCoupled assert _test_args(JzBraCoupled(1, 0, (1, 1))) def test_sympy__physics__quantum__spin__JzKet(): from sympy.physics.quantum.spin import JzKet assert _test_args(JzKet(1, 0)) def test_sympy__physics__quantum__spin__JzKetCoupled(): from sympy.physics.quantum.spin import JzKetCoupled assert _test_args(JzKetCoupled(1, 0, (1, 1))) def test_sympy__physics__quantum__spin__JzOp(): from sympy.physics.quantum.spin import JzOp assert _test_args(JzOp('J')) def test_sympy__physics__quantum__spin__Rotation(): from sympy.physics.quantum.spin import Rotation from sympy import pi assert _test_args(Rotation(pi, 0, pi/2)) def test_sympy__physics__quantum__spin__SpinState(): from sympy.physics.quantum.spin import SpinState assert _test_args(SpinState(1, 0)) def test_sympy__physics__quantum__spin__WignerD(): from sympy.physics.quantum.spin import WignerD assert _test_args(WignerD(0, 1, 2, 3, 4, 5)) def test_sympy__physics__quantum__state__Bra(): from sympy.physics.quantum.state import Bra assert _test_args(Bra(0)) def test_sympy__physics__quantum__state__BraBase(): from sympy.physics.quantum.state import BraBase assert _test_args(BraBase(0)) def test_sympy__physics__quantum__state__Ket(): from sympy.physics.quantum.state import Ket assert _test_args(Ket(0)) def test_sympy__physics__quantum__state__KetBase(): from sympy.physics.quantum.state import KetBase assert _test_args(KetBase(0)) def test_sympy__physics__quantum__state__State(): from sympy.physics.quantum.state import State assert _test_args(State(0)) def test_sympy__physics__quantum__state__StateBase(): from 
sympy.physics.quantum.state import StateBase assert _test_args(StateBase(0)) def test_sympy__physics__quantum__state__TimeDepBra(): from sympy.physics.quantum.state import TimeDepBra assert _test_args(TimeDepBra('psi', 't')) def test_sympy__physics__quantum__state__TimeDepKet(): from sympy.physics.quantum.state import TimeDepKet assert _test_args(TimeDepKet('psi', 't')) def test_sympy__physics__quantum__state__TimeDepState(): from sympy.physics.quantum.state import TimeDepState assert _test_args(TimeDepState('psi', 't')) def test_sympy__physics__quantum__state__Wavefunction(): from sympy.physics.quantum.state import Wavefunction from sympy.functions import sin from sympy import Piecewise, pi n = 1 L = 1 g = Piecewise((0, x < 0), (0, x > L), (sqrt(2//L)*sin(n*pi*x/L), True)) assert _test_args(Wavefunction(g, x)) def test_sympy__physics__quantum__tensorproduct__TensorProduct(): from sympy.physics.quantum.tensorproduct import TensorProduct assert _test_args(TensorProduct(x, y)) def test_sympy__physics__quantum__identitysearch__GateIdentity(): from sympy.physics.quantum.gate import X from sympy.physics.quantum.identitysearch import GateIdentity assert _test_args(GateIdentity(X(0), X(0))) def test_sympy__physics__quantum__sho1d__SHOOp(): from sympy.physics.quantum.sho1d import SHOOp assert _test_args(SHOOp('a')) def test_sympy__physics__quantum__sho1d__RaisingOp(): from sympy.physics.quantum.sho1d import RaisingOp assert _test_args(RaisingOp('a')) def test_sympy__physics__quantum__sho1d__LoweringOp(): from sympy.physics.quantum.sho1d import LoweringOp assert _test_args(LoweringOp('a')) def test_sympy__physics__quantum__sho1d__NumberOp(): from sympy.physics.quantum.sho1d import NumberOp assert _test_args(NumberOp('N')) def test_sympy__physics__quantum__sho1d__Hamiltonian(): from sympy.physics.quantum.sho1d import Hamiltonian assert _test_args(Hamiltonian('H')) def test_sympy__physics__quantum__sho1d__SHOState(): from sympy.physics.quantum.sho1d import SHOState assert _test_args(SHOState(0)) def test_sympy__physics__quantum__sho1d__SHOKet(): from sympy.physics.quantum.sho1d import SHOKet assert _test_args(SHOKet(0)) def test_sympy__physics__quantum__sho1d__SHOBra(): from sympy.physics.quantum.sho1d import SHOBra assert _test_args(SHOBra(0)) def test_sympy__physics__secondquant__AnnihilateBoson(): from sympy.physics.secondquant import AnnihilateBoson assert _test_args(AnnihilateBoson(0)) def test_sympy__physics__secondquant__AnnihilateFermion(): from sympy.physics.secondquant import AnnihilateFermion assert _test_args(AnnihilateFermion(0)) @SKIP("abstract class") def test_sympy__physics__secondquant__Annihilator(): pass def test_sympy__physics__secondquant__AntiSymmetricTensor(): from sympy.physics.secondquant import AntiSymmetricTensor i, j = symbols('i j', below_fermi=True) a, b = symbols('a b', above_fermi=True) assert _test_args(AntiSymmetricTensor('v', (a, i), (b, j))) def test_sympy__physics__secondquant__BosonState(): from sympy.physics.secondquant import BosonState assert _test_args(BosonState((0, 1))) @SKIP("abstract class") def test_sympy__physics__secondquant__BosonicOperator(): pass def test_sympy__physics__secondquant__Commutator(): from sympy.physics.secondquant import Commutator assert _test_args(Commutator(x, y)) def test_sympy__physics__secondquant__CreateBoson(): from sympy.physics.secondquant import CreateBoson assert _test_args(CreateBoson(0)) def test_sympy__physics__secondquant__CreateFermion(): from sympy.physics.secondquant import CreateFermion assert 
    _test_args(CreateFermion(0))

@SKIP("abstract class")
def test_sympy__physics__secondquant__Creator():
    pass

def test_sympy__physics__secondquant__Dagger():
    from sympy.physics.secondquant import Dagger
    from sympy import I
    assert _test_args(Dagger(2*I))

def test_sympy__physics__secondquant__FermionState():
    from sympy.physics.secondquant import FermionState
    assert _test_args(FermionState((0, 1)))

def test_sympy__physics__secondquant__FermionicOperator():
    from sympy.physics.secondquant import FermionicOperator
    assert _test_args(FermionicOperator(0))

def test_sympy__physics__secondquant__FockState():
    from sympy.physics.secondquant import FockState
    assert _test_args(FockState((0, 1)))

def test_sympy__physics__secondquant__FockStateBosonBra():
    from sympy.physics.secondquant import FockStateBosonBra
    assert _test_args(FockStateBosonBra((0, 1)))

def test_sympy__physics__secondquant__FockStateBosonKet():
    from sympy.physics.secondquant import FockStateBosonKet
    assert _test_args(FockStateBosonKet((0, 1)))

def test_sympy__physics__secondquant__FockStateBra():
    from sympy.physics.secondquant import FockStateBra
    assert _test_args(FockStateBra((0, 1)))

def test_sympy__physics__secondquant__FockStateFermionBra():
    from sympy.physics.secondquant import FockStateFermionBra
    assert _test_args(FockStateFermionBra((0, 1)))

def test_sympy__physics__secondquant__FockStateFermionKet():
    from sympy.physics.secondquant import FockStateFermionKet
    assert _test_args(FockStateFermionKet((0, 1)))

def test_sympy__physics__secondquant__FockStateKet():
    from sympy.physics.secondquant import FockStateKet
    assert _test_args(FockStateKet((0, 1)))

def test_sympy__physics__secondquant__InnerProduct():
    from sympy.physics.secondquant import InnerProduct
    from sympy.physics.secondquant import FockStateKet, FockStateBra
    assert _test_args(InnerProduct(FockStateBra((0, 1)), FockStateKet((0, 1))))

def test_sympy__physics__secondquant__NO():
    from sympy.physics.secondquant import NO, F, Fd
    assert _test_args(NO(Fd(x)*F(y)))

def test_sympy__physics__secondquant__PermutationOperator():
    from sympy.physics.secondquant import PermutationOperator
    assert _test_args(PermutationOperator(0, 1))

def test_sympy__physics__secondquant__SqOperator():
    from sympy.physics.secondquant import SqOperator
    assert _test_args(SqOperator(0))

def test_sympy__physics__secondquant__TensorSymbol():
    from sympy.physics.secondquant import TensorSymbol
    assert _test_args(TensorSymbol(x))

def test_sympy__physics__units__Unit():
    from sympy.physics.units import Unit
    assert _test_args(Unit("meter", "m"))

def test_sympy__physics__unitsystems__dimensions__Dimension():
    from sympy.physics.unitsystems.dimensions import Dimension
    assert _test_args(Dimension(name="length", symbol="L", length=1))

def test_sympy__physics__unitsystems__quantities__Quantity():
    from sympy.physics.unitsystems.quantities import Quantity
    from sympy.physics.unitsystems.systems import mks
    assert _test_args(Quantity(10, mks["m"]))

def test_sympy__physics__unitsystems__units__Constant():
    from sympy.physics.unitsystems.units import Constant
    from sympy.physics.unitsystems.dimensions import Dimension
    length = Dimension(length=1)
    assert _test_args(Constant(length, abbrev="u", factor=10))

def test_sympy__physics__unitsystems__units__Unit():
    from sympy.physics.unitsystems.units import Unit
    from sympy.physics.unitsystems.dimensions import Dimension
    length = Dimension(length=1)
    assert _test_args(Unit(length, abbrev="u", factor=10))

def test_sympy__core__numbers__AlgebraicNumber():
    from sympy.core.numbers import AlgebraicNumber
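    # second argument is a coefficient list describing the value as a
    # polynomial expression in the algebraic number itself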
assert _test_args(AlgebraicNumber(sqrt(2), [1, 2, 3])) def test_sympy__polys__polytools__GroebnerBasis(): from sympy.polys.polytools import GroebnerBasis assert _test_args(GroebnerBasis([x, y, z], x, y, z)) def test_sympy__polys__polytools__Poly(): from sympy.polys.polytools import Poly assert _test_args(Poly(2, x, y)) def test_sympy__polys__polytools__PurePoly(): from sympy.polys.polytools import PurePoly assert _test_args(PurePoly(2, x, y)) def test_sympy__polys__rootoftools__RootOf(): from sympy.polys.rootoftools import RootOf assert _test_args(RootOf(x**3 + x + 1, 0)) def test_sympy__polys__rootoftools__RootSum(): from sympy.polys.rootoftools import RootSum assert _test_args(RootSum(x**3 + x + 1, sin)) def test_sympy__series__limits__Limit(): from sympy.series.limits import Limit assert _test_args(Limit(x, x, 0, dir='-')) def test_sympy__series__order__Order(): from sympy.series.order import Order assert _test_args(Order(1, x, y)) def test_sympy__simplify__hyperexpand__Hyper_Function(): from sympy.simplify.hyperexpand import Hyper_Function assert _test_args(Hyper_Function([2], [1])) def test_sympy__simplify__hyperexpand__G_Function(): from sympy.simplify.hyperexpand import G_Function assert _test_args(G_Function([2], [1], [], [])) def test_sympy__tensor__indexed__Idx(): from sympy.tensor.indexed import Idx assert _test_args(Idx('test')) assert _test_args(Idx(1, (0, 10))) def test_sympy__tensor__indexed__Indexed(): from sympy.tensor.indexed import Indexed, Idx assert _test_args(Indexed('A', Idx('i'), Idx('j'))) def test_sympy__tensor__indexed__IndexedBase(): from sympy.tensor.indexed import IndexedBase assert _test_args(IndexedBase('A', shape=(x, y))) assert _test_args(IndexedBase('A', 1)) assert _test_args(IndexedBase('A')[0, 1]) @XFAIL def test_sympy__physics__hep__gamma_matrices__GammaMatrixHead(): # This test fails, this class can be reconstructed from the *args # of an instance using `TensorHead(*args)` from sympy.physics.hep.gamma_matrices import GammaMatrixHead, Lorentz from sympy.tensor.tensor import tensor_indices i = tensor_indices('i', Lorentz) assert _test_args(GammaMatrixHead()) def test_sympy__tensor__tensor__TensorIndexType(): from sympy.tensor.tensor import TensorIndexType assert _test_args(TensorIndexType('Lorentz', metric=False)) def test_sympy__tensor__tensor__TensorSymmetry(): from sympy.tensor.tensor import TensorSymmetry, get_symmetric_group_sgs assert _test_args(TensorSymmetry(get_symmetric_group_sgs(2))) def test_sympy__tensor__tensor__TensorType(): from sympy.tensor.tensor import TensorIndexType, TensorSymmetry, get_symmetric_group_sgs, TensorType Lorentz = TensorIndexType('Lorentz', dummy_fmt='L') sym = TensorSymmetry(get_symmetric_group_sgs(1)) assert _test_args(TensorType([Lorentz], sym)) def test_sympy__tensor__tensor__TensorHead(): from sympy.tensor.tensor import TensorIndexType, TensorSymmetry, TensorType, get_symmetric_group_sgs, TensorHead Lorentz = TensorIndexType('Lorentz', dummy_fmt='L') sym = TensorSymmetry(get_symmetric_group_sgs(1)) S1 = TensorType([Lorentz], sym) assert _test_args(TensorHead('p', S1, 0)) def test_sympy__tensor__tensor__TensorIndex(): from sympy.tensor.tensor import TensorIndexType, TensorIndex Lorentz = TensorIndexType('Lorentz', dummy_fmt='L') assert _test_args(TensorIndex('i', Lorentz)) @SKIP("abstract class") def test_sympy__tensor__tensor__TensExpr(): pass def test_sympy__tensor__tensor__TensAdd(): from sympy.tensor.tensor import TensorIndexType, TensorSymmetry, TensorType, get_symmetric_group_sgs, tensor_indices, TensAdd 
Lorentz = TensorIndexType('Lorentz', dummy_fmt='L') a, b = tensor_indices('a,b', Lorentz) sym = TensorSymmetry(get_symmetric_group_sgs(1)) S1 = TensorType([Lorentz], sym) p, q = S1('p,q') t1 = p(a) t2 = q(a) assert _test_args(TensAdd(t1, t2)) def test_sympy__tensor__tensor__Tensor(): from sympy.core import S from sympy.tensor.tensor import TensorIndexType, TensorSymmetry, TensorType, get_symmetric_group_sgs, tensor_indices, TensMul, TIDS Lorentz = TensorIndexType('Lorentz', dummy_fmt='L') a, b = tensor_indices('a,b', Lorentz) sym = TensorSymmetry(get_symmetric_group_sgs(1)) S1 = TensorType([Lorentz], sym) p = S1('p') assert _test_args(p(a)) def test_sympy__tensor__tensor__TensMul(): from sympy.core import S from sympy.tensor.tensor import TensorIndexType, TensorSymmetry, TensorType, get_symmetric_group_sgs, tensor_indices, TensMul, TIDS Lorentz = TensorIndexType('Lorentz', dummy_fmt='L') a, b = tensor_indices('a,b', Lorentz) sym = TensorSymmetry(get_symmetric_group_sgs(1)) S1 = TensorType([Lorentz], sym) p = S1('p') q = S1('q') assert _test_args(3*p(a)*q(b)) def test_as_coeff_add(): assert (7, (3*x, 4*x**2)) == (7 + 3*x + 4*x**2).as_coeff_add() def test_sympy__geometry__curve__Curve(): from sympy.geometry.curve import Curve assert _test_args(Curve((x, 1), (x, 0, 1))) def test_sympy__geometry__point__Point(): from sympy.geometry.point import Point assert _test_args(Point(0, 1)) def test_sympy__geometry__point3d__Point3D(): from sympy.geometry.point3d import Point3D assert _test_args(Point3D(0, 1, 2)) def test_sympy__geometry__ellipse__Ellipse(): from sympy.geometry.ellipse import Ellipse assert _test_args(Ellipse((0, 1), 2, 3)) def test_sympy__geometry__ellipse__Circle(): from sympy.geometry.ellipse import Circle assert _test_args(Circle((0, 1), 2)) @SKIP("abstract class") def test_sympy__geometry__line__LinearEntity(): pass def test_sympy__geometry__line__Line(): from sympy.geometry.line import Line assert _test_args(Line((0, 1), (2, 3))) def test_sympy__geometry__line__Ray(): from sympy.geometry.line import Ray assert _test_args(Ray((0, 1), (2, 3))) def test_sympy__geometry__line__Segment(): from sympy.geometry.line import Segment assert _test_args(Segment((0, 1), (2, 3))) @SKIP("abstract class") def test_sympy__geometry__line3d__LinearEntity3D(): pass def test_sympy__geometry__line3d__Line3D(): from sympy.geometry.line3d import Line3D assert _test_args(Line3D((0, 1, 1), (2, 3, 4))) def test_sympy__geometry__line3d__Segment3D(): from sympy.geometry.line3d import Segment3D assert _test_args(Segment3D((0, 1, 1), (2, 3, 4))) def test_sympy__geometry__line3d__Ray3D(): from sympy.geometry.line3d import Ray3D assert _test_args(Ray3D((0, 1, 1), (2, 3, 4))) def test_sympy__geometry__plane__Plane(): from sympy.geometry.plane import Plane assert _test_args(Plane((1, 1, 1), (-3, 4, -2), (1, 2, 3))) def test_sympy__geometry__polygon__Polygon(): from sympy.geometry.polygon import Polygon assert _test_args(Polygon((0, 1), (2, 3), (4, 5), (6, 7))) def test_sympy__geometry__polygon__RegularPolygon(): from sympy.geometry.polygon import RegularPolygon assert _test_args(RegularPolygon((0, 1), 2, 3, 4)) def test_sympy__geometry__polygon__Triangle(): from sympy.geometry.polygon import Triangle assert _test_args(Triangle((0, 1), (2, 3), (4, 5))) def test_sympy__geometry__entity__GeometryEntity(): from sympy.geometry.entity import GeometryEntity from sympy.geometry.point import Point assert _test_args(GeometryEntity(Point(1, 0), 1, [1, 2])) def test_sympy__diffgeom__diffgeom__Manifold(): from sympy.diffgeom 
import Manifold assert _test_args(Manifold('name', 3)) def test_sympy__diffgeom__diffgeom__Patch(): from sympy.diffgeom import Manifold, Patch assert _test_args(Patch('name', Manifold('name', 3))) def test_sympy__diffgeom__diffgeom__CoordSystem(): from sympy.diffgeom import Manifold, Patch, CoordSystem assert _test_args(CoordSystem('name', Patch('name', Manifold('name', 3)))) @XFAIL def test_sympy__diffgeom__diffgeom__Point(): from sympy.diffgeom import Manifold, Patch, CoordSystem, Point assert _test_args(Point( CoordSystem('name', Patch('name', Manifold('name', 3))), [x, y])) def test_sympy__diffgeom__diffgeom__BaseScalarField(): from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseScalarField cs = CoordSystem('name', Patch('name', Manifold('name', 3))) assert _test_args(BaseScalarField(cs, 0)) def test_sympy__diffgeom__diffgeom__BaseVectorField(): from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseVectorField cs = CoordSystem('name', Patch('name', Manifold('name', 3))) assert _test_args(BaseVectorField(cs, 0)) def test_sympy__diffgeom__diffgeom__Differential(): from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseScalarField, Differential cs = CoordSystem('name', Patch('name', Manifold('name', 3))) assert _test_args(Differential(BaseScalarField(cs, 0))) def test_sympy__diffgeom__diffgeom__Commutator(): from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseVectorField, Commutator cs = CoordSystem('name', Patch('name', Manifold('name', 3))) cs1 = CoordSystem('name1', Patch('name', Manifold('name', 3))) v = BaseVectorField(cs, 0) v1 = BaseVectorField(cs1, 0) assert _test_args(Commutator(v, v1)) def test_sympy__diffgeom__diffgeom__TensorProduct(): from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseScalarField, Differential, TensorProduct cs = CoordSystem('name', Patch('name', Manifold('name', 3))) d = Differential(BaseScalarField(cs, 0)) assert _test_args(TensorProduct(d, d)) def test_sympy__diffgeom__diffgeom__WedgeProduct(): from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseScalarField, Differential, WedgeProduct cs = CoordSystem('name', Patch('name', Manifold('name', 3))) d = Differential(BaseScalarField(cs, 0)) d1 = Differential(BaseScalarField(cs, 1)) assert _test_args(WedgeProduct(d, d1)) def test_sympy__diffgeom__diffgeom__LieDerivative(): from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseScalarField, Differential, BaseVectorField, LieDerivative cs = CoordSystem('name', Patch('name', Manifold('name', 3))) d = Differential(BaseScalarField(cs, 0)) v = BaseVectorField(cs, 0) assert _test_args(LieDerivative(v, d)) @XFAIL def test_sympy__diffgeom__diffgeom__BaseCovarDerivativeOp(): from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseCovarDerivativeOp cs = CoordSystem('name', Patch('name', Manifold('name', 3))) assert _test_args(BaseCovarDerivativeOp(cs, 0, [[[0, ]*3, ]*3, ]*3)) def test_sympy__diffgeom__diffgeom__CovarDerivativeOp(): from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseVectorField, CovarDerivativeOp cs = CoordSystem('name', Patch('name', Manifold('name', 3))) v = BaseVectorField(cs, 0) _test_args(CovarDerivativeOp(v, [[[0, ]*3, ]*3, ]*3)) def test_sympy__categories__baseclasses__Class(): from sympy.categories.baseclasses import Class assert _test_args(Class()) def test_sympy__categories__baseclasses__Object(): from sympy.categories import Object assert _test_args(Object("A")) @XFAIL def test_sympy__categories__baseclasses__Morphism(): from sympy.categories import Object, Morphism assert 
_test_args(Morphism(Object("A"), Object("B"))) def test_sympy__categories__baseclasses__IdentityMorphism(): from sympy.categories import Object, IdentityMorphism assert _test_args(IdentityMorphism(Object("A"))) def test_sympy__categories__baseclasses__NamedMorphism(): from sympy.categories import Object, NamedMorphism assert _test_args(NamedMorphism(Object("A"), Object("B"), "f")) def test_sympy__categories__baseclasses__CompositeMorphism(): from sympy.categories import Object, NamedMorphism, CompositeMorphism A = Object("A") B = Object("B") C = Object("C") f = NamedMorphism(A, B, "f") g = NamedMorphism(B, C, "g") assert _test_args(CompositeMorphism(f, g)) def test_sympy__categories__baseclasses__Diagram(): from sympy.categories import Object, NamedMorphism, Diagram A = Object("A") B = Object("B") C = Object("C") f = NamedMorphism(A, B, "f") d = Diagram([f]) assert _test_args(d) def test_sympy__categories__baseclasses__Category(): from sympy.categories import Object, NamedMorphism, Diagram, Category A = Object("A") B = Object("B") C = Object("C") f = NamedMorphism(A, B, "f") g = NamedMorphism(B, C, "g") d1 = Diagram([f, g]) d2 = Diagram([f]) K = Category("K", commutative_diagrams=[d1, d2]) assert _test_args(K) def test_sympy__ntheory__factor___totient(): from sympy.ntheory.factor_ import totient k = symbols('k', integer=True) t = totient(k) assert _test_args(t) def test_sympy__ntheory__factor___divisor_sigma(): from sympy.ntheory.factor_ import divisor_sigma k = symbols('k', integer=True) n = symbols('n', integer=True) t = divisor_sigma(n, k) assert _test_args(t) def test_sympy__ntheory__residue_ntheory__mobius(): from sympy.ntheory import mobius assert _test_args(mobius(2)) def test_sympy__physics__optics__waves__TWave(): from sympy.physics.optics import TWave A, f, phi = symbols('A, f, phi') assert _test_args(TWave(A, f, phi)) def test_sympy__physics__optics__gaussopt__BeamParameter(): from sympy.physics.optics import BeamParameter assert _test_args(BeamParameter(530e-9, 1, w=1e-3)) def test_sympy__physics__optics__medium__Medium(): from sympy.physics.optics import Medium assert _test_args(Medium('m')) def test_sympy__printing__codeprinter__Assignment(): from sympy.printing.codeprinter import Assignment assert _test_args(Assignment(x, y)) def test_sympy__vector__coordsysrect__CoordSysCartesian(): from sympy.vector.coordsysrect import CoordSysCartesian assert _test_args(CoordSysCartesian('C')) def test_sympy__vector__point__Point(): from sympy.vector.point import Point assert _test_args(Point('P')) def test_sympy__vector__basisdependent__BasisDependent(): from sympy.vector.basisdependent import BasisDependent #These classes have been created to maintain an OOP hierarchy #for Vectors and Dyadics. Are NOT meant to be initialized def test_sympy__vector__basisdependent__BasisDependentMul(): from sympy.vector.basisdependent import BasisDependentMul #These classes have been created to maintain an OOP hierarchy #for Vectors and Dyadics. Are NOT meant to be initialized def test_sympy__vector__basisdependent__BasisDependentAdd(): from sympy.vector.basisdependent import BasisDependentAdd #These classes have been created to maintain an OOP hierarchy #for Vectors and Dyadics. Are NOT meant to be initialized def test_sympy__vector__basisdependent__BasisDependentZero(): from sympy.vector.basisdependent import BasisDependentZero #These classes have been created to maintain an OOP hierarchy #for Vectors and Dyadics. 
Are NOT meant to be initialized def test_sympy__vector__vector__BaseVector(): from sympy.vector.vector import BaseVector from sympy.vector.coordsysrect import CoordSysCartesian C = CoordSysCartesian('C') assert _test_args(BaseVector('Ci', 0, C, ' ', ' ')) def test_sympy__vector__vector__VectorAdd(): from sympy.vector.vector import VectorAdd, VectorMul from sympy.vector.coordsysrect import CoordSysCartesian C = CoordSysCartesian('C') from sympy.abc import a, b, c, x, y, z v1 = a*C.i + b*C.j + c*C.k v2 = x*C.i + y*C.j + z*C.k assert _test_args(VectorAdd(v1, v2)) assert _test_args(VectorMul(x, v1)) def test_sympy__vector__vector__VectorMul(): from sympy.vector.vector import VectorMul from sympy.vector.coordsysrect import CoordSysCartesian C = CoordSysCartesian('C') from sympy.abc import a assert _test_args(VectorMul(a, C.i)) def test_sympy__vector__vector__VectorZero(): from sympy.vector.vector import VectorZero assert _test_args(VectorZero()) def test_sympy__vector__vector__Vector(): from sympy.vector.vector import Vector #Vector is never to be initialized using args pass def test_sympy__vector__dyadic__Dyadic(): from sympy.vector.dyadic import Dyadic #Dyadic is never to be initialized using args pass def test_sympy__vector__dyadic__BaseDyadic(): from sympy.vector.dyadic import BaseDyadic from sympy.vector.coordsysrect import CoordSysCartesian C = CoordSysCartesian('C') assert _test_args(BaseDyadic(C.i, C.j)) def test_sympy__vector__dyadic__DyadicMul(): from sympy.vector.dyadic import BaseDyadic, DyadicMul from sympy.vector.coordsysrect import CoordSysCartesian C = CoordSysCartesian('C') assert _test_args(DyadicMul(3, BaseDyadic(C.i, C.j))) def test_sympy__vector__dyadic__DyadicAdd(): from sympy.vector.dyadic import BaseDyadic, DyadicAdd from sympy.vector.coordsysrect import CoordSysCartesian C = CoordSysCartesian('C') assert _test_args(2 * DyadicAdd(BaseDyadic(C.i, C.i), BaseDyadic(C.i, C.j))) def test_sympy__vector__dyadic__DyadicZero(): from sympy.vector.dyadic import DyadicZero assert _test_args(DyadicZero()) def test_sympy__vector__deloperator__Del(): from sympy.vector.deloperator import Del from sympy.vector.coordsysrect import CoordSysCartesian C = CoordSysCartesian('C') assert _test_args(Del(C)) def test_sympy__vector__orienters__Orienter(): from sympy.vector.orienters import Orienter #Not to be initialized def test_sympy__vector__orienters__ThreeAngleOrienter(): from sympy.vector.orienters import ThreeAngleOrienter #Not to be initialized def test_sympy__vector__orienters__AxisOrienter(): from sympy.vector.orienters import AxisOrienter from sympy.vector.coordsysrect import CoordSysCartesian C = CoordSysCartesian('C') assert _test_args(AxisOrienter(x, C.i)) def test_sympy__vector__orienters__BodyOrienter(): from sympy.vector.orienters import BodyOrienter assert _test_args(BodyOrienter(x, y, z, '123')) def test_sympy__vector__orienters__SpaceOrienter(): from sympy.vector.orienters import SpaceOrienter assert _test_args(SpaceOrienter(x, y, z, '123')) def test_sympy__vector__orienters__QuaternionOrienter(): from sympy.vector.orienters import QuaternionOrienter a, b, c, d = symbols('a b c d') assert _test_args(QuaternionOrienter(a, b, c, d)) def test_sympy__vector__scalar__BaseScalar(): from sympy.vector.scalar import BaseScalar from sympy.vector.coordsysrect import CoordSysCartesian C = CoordSysCartesian('C') assert _test_args(BaseScalar('Cx', 0, C, ' ', ' '))
from sympy.functions.elementary.complexes import transpose assert _test_args(transpose(x))
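A quick illustration of the contract the test_sympy__* checks above rely on. This sketch assumes the _test_args helper follows the convention of SymPy's own test_args.py, verifying that every element of an object's .args is itself a Basic instance so the object can be rebuilt from its args; the name _test_args_sketch is a hypothetical stand-in.

# Hedged sketch of the invariant exercised above; _test_args_sketch is a
# hypothetical stand-in, assumed to mirror the helper in SymPy's test_args.py.
from sympy import Basic, Symbol, sin

def _test_args_sketch(obj):
    # An expression is rebuildable as type(obj)(*obj.args) only when every
    # argument is itself a Basic instance.
    return all(isinstance(arg, Basic) for arg in obj.args)

x = Symbol('x')
assert _test_args_sketch(sin(x) + x)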
create_table.go
package main import ( "log" builder "github.com/Snehal1112/QueryBuilder" "github.com/Snehal1112/QueryBuilder/query" ) func CreateTable(builder builder.SQL) { // Create table. ddlQuery := builder.NewDDL() create := ddlQuery.Create() /**
categoryId INT AUTO_INCREMENT PRIMARY KEY, categoryName VARCHAR(100) NOT NULL ); */ categoriesTable := create.Table("categories") categoriesTable.Field("categoryId", query.NewDataType(query.INT, 50), query.NewConstrain([]int{query.NOTNULL, query.AI, query.PK})) categoriesTable.Field("categoryName", query.NewDataType(query.VARCHAR, 100), query.NewConstrain(nil)) result, err := categoriesTable.Execute() if err != nil { log.Fatal(err) } _, err = result.LastInsertId() if err != nil { log.Fatal(err) } log.Println("Categories table created ") /** CREATE TABLE products( productId INT AUTO_INCREMENT PRIMARY KEY, productName varchar(100) not null, categoryId INT NOT NULL, CONSTRAINT fk_category FOREIGN KEY (categoryId) REFERENCES categories(categoryId) ON UPDATE CASCADE ON DELETE CASCADE ) */ // Associated products table with categories productTable := create.Table("products") productTable.Field("productId", query.NewDataType(query.INT, 50), query.NewConstrain([]int{query.AI, query.PK})) productTable.Field("productName", query.NewDataType(query.VARCHAR, 225), query.NewConstrain([]int{query.NOTNULL})) productTable.Field("categoryId", query.NewDataType(query.INT, 50), query.NewConstrain(nil)) productTable.NewForeignKeyConstrain("fk_category", "categoryId", "categories") productTable.SetForeignKey(query.Cascade, query.Cascade) result, err = productTable.Execute() if err != nil { log.Fatal(err) } _, err = result.LastInsertId() if err != nil { log.Fatal(err) } log.Println("Products table created ") }
CREATE TABLE categories(
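A hedged companion sketch for create_table.go above, showing the effect of the ON DELETE CASCADE constraint once the two tables exist. It uses plain database/sql rather than the builder; the MySQL driver import and the DSN are assumptions, not part of the library.

// Hypothetical usage sketch: exercises the categories/products schema built
// above via plain database/sql. Driver and DSN are assumptions.
package main

import (
	"database/sql"
	"log"

	_ "github.com/go-sql-driver/mysql" // assumed MySQL driver
)

func main() {
	db, err := sql.Open("mysql", "user:pass@/shop") // hypothetical DSN
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Insert a category, then a product referencing it.
	res, err := db.Exec(`INSERT INTO categories (categoryName) VALUES (?)`, "Books")
	if err != nil {
		log.Fatal(err)
	}
	catID, _ := res.LastInsertId()
	if _, err = db.Exec(`INSERT INTO products (productName, categoryId) VALUES (?, ?)`, "Go in Action", catID); err != nil {
		log.Fatal(err)
	}

	// ON DELETE CASCADE: removing the category also removes its products.
	if _, err = db.Exec(`DELETE FROM categories WHERE categoryId = ?`, catID); err != nil {
		log.Fatal(err)
	}
	log.Println("category and its products removed via cascade")
}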
packet.rs
use super::engine; use super::memory; use super::lib; use std::cmp; use std::mem; // PACKET STRUCT AND FREELIST // // This module defines a struct to represent packets of network data, and // implements a global freelist from which packets can be allocated. // // Packet - packet structure with length and data fields // PAYLOAD_SIZE - size of packet’s data field // init() - initializes the freelist with FREELIST_SIZE packets // allocate() -> Box<Packet> - take a packet off the freelist for use // free(Box<Packet>) - return a packet to the freelist // The maximum amount of payload in any given packet. pub const PAYLOAD_SIZE: usize = 1024*10; // Packet of network data, with associated metadata. // XXX - should be #[repr(C, packed)], however that would require unsafe{} to // access members. Is the memory layout in repr(rust) equivalent? pub struct Packet { pub length: u16, // data payload length pub data: [u8; PAYLOAD_SIZE] } // A packet may never go out of scope. It is either on the freelist, a link, or // in active use (in-scope). // XXX - Could free() packets automatically in Drop, and obsolete manual free. impl Drop for Packet { fn drop(&mut self) { panic!("Packet leaked"); } } // Allocate a packet struct on the heap (initialized all-zero). // NB: Box is how we heap-allocate in Rust. fn new_packet() -> Box<Packet> { let base = memory::dma_alloc(mem::size_of::<Packet>(), mem::align_of::<Packet>()); let mut p = unsafe { Box::from_raw(base as *mut Packet) }; p.length = 0; p } fn new_packet_noroot() -> Box<Packet> { Box::new(Packet { length: 0, data: [0; PAYLOAD_SIZE] }) } // Maximum number of packets on the freelist. const MAX_PACKETS: usize = 1_000_000; // Freelist consists of an array of mutable raw pointers to Packet, // and a fill counter. struct Freelist { list: [*mut Packet; MAX_PACKETS], nfree: usize } // FL: global freelist (initially empty, populated with null ptrs). static mut FL: Freelist = Freelist { list: [std::ptr::null_mut(); MAX_PACKETS], nfree: 0 }; // Fill up FL with freshly allocated packets. // NB: using FL is unsafe because it is a mutable static (we have to ensure // thread safety). // NB: use DMA allocator if run as root, regular heap allocator otherwise. static mut PACKETS_ALLOCATED: usize = 0; static mut PACKET_ALLOCATION_STEP: usize = 1000; fn pr
) { let new_packet = match unsafe { libc::getuid() } { 0 => new_packet, _ => new_packet_noroot }; unsafe { assert!(PACKETS_ALLOCATED + PACKET_ALLOCATION_STEP <= MAX_PACKETS, "Packet allocation overflow"); for _ in 0..PACKET_ALLOCATION_STEP { free_internal(new_packet()); } PACKETS_ALLOCATED += PACKET_ALLOCATION_STEP; PACKET_ALLOCATION_STEP *= 2; } } // Allocate an empty Boxed Packet from FL. // NB: we can use Box::from_raw safely on the packets "leaked" onto // the static FL. We can also be sure that the Box does not alias another // packet (see free). #[inline(always)] pub fn allocate() -> Box<Packet> { if unsafe { FL.nfree == 0 } { preallocate_step(); } unsafe { FL.nfree -= 1; } unsafe { Box::from_raw(FL.list[FL.nfree]) } } // Return Boxed Packet to FL. // NB: because p is mutable and Box does not implement the Copy trait free // effectively consumes the Box. Once a packet is freed it can no longer be // referenced, and hence can not be mutated once it has been returned to the // freelist. // NB: we can cast a mutable reference of the boxed packet (&mut *p) to a raw // pointer. // NB: we std::mem::forget the Box p to inhibit Dropping of the packet once it // is on the freelist. (I.e., we intentionally leak up to MAX_PACKETS packets // onto the static FL.) If a packet goes out of scope without being freed, the // attempt to Drop it will trigger a panic (see Packet). Hence we ensure that // all allocated packets are eventually freed. fn free_internal(mut p: Box<Packet>) { if unsafe { FL.nfree } == MAX_PACKETS { panic!("Packet freelist overflow"); } p.length = 0; unsafe { FL.list[FL.nfree] = &mut *p; } mem::forget(p); unsafe { FL.nfree += 1; } } pub fn free (p: Box<Packet>) { engine::add_frees(); engine::add_freebytes(p.length as u64); // Calculate bits of physical capacity required for packet on 10GbE // Account for minimum data size and overhead of Ethernet preamble, CRC, // and inter-packet gap // https://netoptimizer.blogspot.com/2014/05/the-calculations-10gbits-wirespeed.html engine::add_freebits((12 + 8 + cmp::max(p.length as u64, 60) + 4) * 8); free_internal(p); } // Clone a packet pub fn clone (p: &Box<Packet>) -> Box<Packet> { let mut copy = allocate(); lib::copy(&mut copy.data, &p.data, p.length as usize); copy.length = p.length; copy } // pub fn debug() { // unsafe { // println!("FL.nfree: {}", FL.nfree); // println!("FL.list[FL.nfree].data[0]: {}", // FL.list[FL.nfree-1].as_mut().unwrap().data[0]); // } // } #[cfg(test)] mod selftest { use super::*; #[test] fn alloc() { let mut p = allocate(); println!("Allocated a packet of length {}", p.length); p.length = 1; p.data[0] = 42; //p.data[100000] = 99; // Would cause compile error println!("Mutating packet (length = {}, data[0] = {})", p.length, p.data[0]); let len = p.length; free(p); // Not freeing would cause panic println!("Freed a packet of length {}", len); //p.length = 2; // Would cause compile error } }
eallocate_step (
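A worked sketch of the 10GbE capacity accounting that free() performs in packet.rs above: inter-packet gap (12 B) plus preamble (8 B) plus the payload clamped to the 60-byte minimum plus CRC (4 B), converted to bits. It is standalone and mirrors that formula only.

// Standalone illustration of the wire-capacity formula used by free() above.
fn wire_bits(payload_len: u64) -> u64 {
    // IPG (12) + preamble (8) + max(payload, 60) + CRC (4), in bits.
    (12 + 8 + std::cmp::max(payload_len, 60) + 4) * 8
}

fn main() {
    assert_eq!(wire_bits(1), 672);      // tiny payload clamps to a 60-byte frame
    assert_eq!(wire_bits(1500), 12192); // full 1500-byte payload: 1524 B on the wire
}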
lib.rs
//! Explicit and efficient future that results from a branched future. //! //! The `union_future` macro creates a future derived from a branch of different underlying //! futures. The macro can prevent unnecessary boxing of futures when the code can branch //! into multiple future types. //! //! The macro works by exposing an enum that implements the Future trait, where the underlying //! future drives the polling behavior. The variants of the enum can have different underlying //! state machines (types that implement the `Future` trait). //! //! Additionally, the underlying branch state machines can have *different* Item types that are //! mapped to the `union_future` future's Item type via the `From` trait. //! //! Also, as an added bonus, the macro will derive the `From` trait for the underlying state //! machines in order to make the branched code clean. //! //! ## Installation //! //! Add this to your `Cargo.toml`: //! //! ```toml //! [dependencies] //! union-future = "0.1" //! futures = "0.1" //! ``` //! ## Examples //! //! The basic usage of the macro uses the same Item type from different underlying //! futures. For example, if you have a locally cached version otherwise the code //! will query the database: //! //! ``` //! #[macro_use] //! extern crate union_future; //! extern crate futures; //! //! use futures::*; //! use futures::future::*; //! //! //! // Macro will create the enum and necessary trait implementations //! // for the QueryFuture. This enum will have 2 variants: Cached and Db. //! union_future!(QueryFuture<u64, DbError>, //! Cached => FutureResult<u64, DbError>, //! Db => DbQueryFuture<u64>); //! //! // Example code that branches, using the future created by the macro //! pub fn query(db: &Db, key: &str) -> QueryFuture { //! // this example shows multiple ways the QueryFuture can be constructed: //! // either by the explicit enum variant or by using the From/Into traits //! if let Some(cached_val) = check_local_cache(key) { //! QueryFuture::Cached(ok(cached_val)) //! } else { //! query_db(db, key).into() //! } //! } //! //! fn check_local_cache(key: &str) -> Option<u64> { //! // ... //! # panic!("Unimplemented") //! } //! //! fn query_db(db: &Db, key: &str) -> DbQueryFuture<u64> { //! // ... //! # panic!("Unimplemented") //! } //! //! # pub struct DbError { //! # } //! # pub struct Db { //! # } //! # pub type DbQueryFuture<T> = Empty<T, DbError>; //! # fn main() {} //! ``` //! //! You could, however, have a future that can be mapped into the future result type //! with the `From` trait: //! //! ``` //! # #[macro_use] //! # extern crate union_future; //! # extern crate futures; //! # use futures::*; //! # use futures::future::*; //! pub enum RedisValue { //! Null, //! Integer(i64), //! Bulk(String), //! } //! //! // Implementing the From trait allows the underlying futures to expose //! // different Item types transparently //! //! impl From<()> for RedisValue { //! fn from(_: ()) -> RedisValue { //! RedisValue::Null //! } //! } //! //! impl From<i64> for RedisValue { //! fn from(other: i64) -> RedisValue { //! RedisValue::Integer(other) //! } //! } //! //! impl From<String> for RedisValue { //! fn from(other: String) -> RedisValue { //! RedisValue::Bulk(other) //! } //! } //! //! union_future!(RedisValueFuture<RedisValue, DbError>, //! Pong => FutureResult<(), DbError>, //! IntegerQuery => DbQueryFuture<i64>, //! StringQuery => DbQueryFuture<String>); //! //! # pub struct DbError { //! # } //! # pub struct MemDb { //! # } //! 
# pub type DbQueryFuture<T> = Empty<T, DbError>; //! # fn main() {} //! ``` #[macro_use] extern crate futures; /// A macro to create a future that has branched from multiple underlying futures of distinct /// types. #[macro_export] macro_rules! union_future { ($name:ident<$item:ty, $err:ty>, $($n:ident => $ft:ty),*) => ( pub enum $name { $( $n($ft) ),* } impl futures::Future for $name { type Item = $item; type Error = $err; fn poll(&mut self) -> futures::Poll<Self::Item, Self::Error> { match *self { $( $name::$n(ref mut f) => { match f.poll() { Ok(futures::Async::Ready(t)) => Ok(futures::Async::Ready(From::from(t))), Ok(futures::Async::NotReady) => Ok(futures::Async::NotReady), Err(e) => Err(From::from(e)), } } ),* } } } $( impl From<$ft> for $name { fn from(other: $ft) -> $name { $name::$n(other) } })* ); } #[cfg(test)] #[allow(dead_code)] mod tests { extern crate futures; use futures::*; use futures::future::*; #[derive(PartialEq, Debug, Eq)] pub enum Error { Fail, BigFail, } #[derive(PartialEq, Debug, Eq)] pub struct OtherError { op: u64 } impl From<OtherError> for Error { fn from(_: OtherError) -> Error
} #[test] fn same_types() { union_future!(TestFut<u64, Error>, Forever => Empty<u64, Error>, Immediate => FutureResult<u64, Error>); let mut a: TestFut = empty::<u64, Error>().into(); assert_eq!(Ok(Async::NotReady), a.poll()); let mut b: TestFut = ok::<u64, Error>(5).into(); assert_eq!(Ok(Async::Ready(5u64)), b.poll()); } #[test] fn different_item_types() { union_future!(TestFut<f64, Error>, Number => FutureResult<u32, Error>, Floating => FutureResult<f32, Error>); let mut a: TestFut = ok::<u32, Error>(5u32).into(); assert_eq!(Ok(Async::Ready(5f64)), a.poll()); let mut b: TestFut = ok::<f32, Error>(5.25f32).into(); assert_eq!(Ok(Async::Ready(5.25f64)), b.poll()); } #[test] fn different_err_types() { union_future!(TestFut<f64, Error>, Number => FutureResult<u32, Error>, Floating => FutureResult<f32, OtherError>); let mut a: TestFut = ok::<u32, Error>(5u32).into(); assert_eq!(Ok(Async::Ready(5f64)), a.poll()); let mut b: TestFut = ok::<f32, OtherError>(5.25f32).into(); assert_eq!(Ok(Async::Ready(5.25f64)), b.poll()); } }
{ Error::BigFail }
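A hedged consumer-side sketch for the union_future crate above, driving a macro-generated enum to completion with the blocking wait() from futures 0.1. The enum and error names are made up for illustration; nothing here is new library API.

// Hypothetical consumer crate: a single-variant union is enough to show the
// derived From impl and the poll plumbing generated by union_future!.
#[macro_use]
extern crate union_future;
extern crate futures;

use futures::Future;
use futures::future::{ok, FutureResult};

#[derive(Debug)]
pub enum Error { Fail }

union_future!(SmallFut<u64, Error>,
              Immediate => FutureResult<u64, Error>);

fn main() {
    // The macro derives From<FutureResult<u64, Error>> for SmallFut.
    let f: SmallFut = ok::<u64, Error>(7).into();
    assert_eq!(f.wait().unwrap(), 7); // wait() blocks until the future resolves
}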
browser.py
import os from selenium import webdriver from selenium.common.exceptions import NoSuchElementException from selenium.common.exceptions import TimeoutException from selenium.webdriver.chrome.options import Options from selenium.webdriver.common.by import By from selenium.webdriver.support import expected_conditions as EC from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.common.keys import Keys from .utils import randmized_sleep class Browser: def __init__(self, has_screen): dir_path = os.path.dirname(os.path.realpath(__file__)) useragent = "Mozilla/5.0 (X11; Linux i686; rv:77.0) Gecko/20100101 Firefox/77.0" service_args = ["--ignore-ssl-errors=true"] chrome_options = Options() chrome_options.add_argument(f'--user-agent={useragent}') if not has_screen: chrome_options.add_argument("--headless") chrome_options.add_argument("--start-maximized") chrome_options.add_argument("--no-sandbox") self.driver = webdriver.Chrome( executable_path="%s/bin/chromedriver" % dir_path, service_args=service_args, chrome_options=chrome_options, ) self.driver.implicitly_wait(5) @property def page_height(self): return self.driver.execute_script("return document.body.scrollHeight") def get(self, url): self.driver.get(url) @property def current_url(self): return self.driver.current_url def implicitly_wait(self, t): self.driver.implicitly_wait(t) def find_one(self, css_selector, elem=None, waittime=0): obj = elem or self.driver if waittime: WebDriverWait(obj, waittime).until( EC.presence_of_element_located((By.CSS_SELECTOR, css_selector)) ) try: return obj.find_element(By.CSS_SELECTOR, css_selector) except NoSuchElementException: return None def find(self, css_selector, elem=None, waittime=0): obj = elem or self.driver try: if waittime: WebDriverWait(obj, waittime).until( EC.presence_of_element_located((By.CSS_SELECTOR, css_selector)) ) except TimeoutException: return None try: return obj.find_elements(By.CSS_SELECTOR, css_selector) except NoSuchElementException: return None def scroll_down(self, wait=0.3): self.driver.execute_script("window.scrollTo(0, document.body.scrollHeight)") randmized_sleep(wait) def scroll_up(self, offset=-1, wait=2): if offset == -1: self.driver.execute_script("window.scrollTo(0, 0)") else: self.driver.execute_script("window.scrollBy(0, -%s)" % offset) randmized_sleep(wait) def js_click(self, elem): self.driver.execute_script("arguments[0].click();", elem) def open_new_tab(self, url): self.driver.execute_script("window.open('%s');" %url) self.driver.switch_to.window(self.driver.window_handles[1]) def close_current_tab(self): self.driver.close() self.driver.switch_to.window(self.driver.window_handles[0]) def __del__(self): try: self.driver.quit() except Exception:
pass
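A hedged usage sketch for the Browser wrapper above. The URL and CSS selector are placeholders, and the class is passed in to sidestep the package import path (the module relies on a relative import of utils and a chromedriver binary under bin/).

# Hypothetical driver for the Browser wrapper above; URL and selector are
# placeholders, and browser_cls is the Browser class from this module.
def demo(browser_cls):
    browser = browser_cls(has_screen=False)   # headless Chrome
    browser.get("https://example.com")        # placeholder URL
    # find() waits up to 10 seconds and returns None if nothing matched.
    for link in browser.find("a", waittime=10) or []:
        print(link.get_attribute("href"))
    browser.scroll_down(wait=0.5)             # nudge any lazy-loaded content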
setup.py
# Copyright 2015, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """A setup module for the GRPC Python package.""" from distutils import cygwinccompiler from distutils import extension as _extension from distutils import util import os import os.path import pkg_resources import platform import re import shlex import shutil import sys import sysconfig import setuptools from setuptools.command import egg_info # Redirect the manifest template from MANIFEST.in to PYTHON-MANIFEST.in. egg_info.manifest_maker.template = 'PYTHON-MANIFEST.in' PY3 = sys.version_info.major == 3 PYTHON_STEM = os.path.join('src', 'python', 'grpcio') CORE_INCLUDE = ('include', '.',) BORINGSSL_INCLUDE = (os.path.join('third_party', 'boringssl', 'include'),) ZLIB_INCLUDE = (os.path.join('third_party', 'zlib'),) # Ensure we're in the proper directory whether or not we're being used by pip. os.chdir(os.path.dirname(os.path.abspath(__file__))) sys.path.insert(0, os.path.abspath(PYTHON_STEM)) # Break import-style to ensure we can actually find our in-repo dependencies. import _spawn_patch import commands import grpc_core_dependencies import grpc_version _spawn_patch.monkeypatch_spawn() LICENSE = '3-clause BSD' # Environment variable to determine whether or not the Cython extension should # *use* Cython or use the generated C files. Note that this requires the C files # to have been generated by building first *with* Cython support. Even if this # is set to false, if the script detects that the generated `.c` file isn't # present, then it will still attempt to use Cython. BUILD_WITH_CYTHON = os.environ.get('GRPC_PYTHON_BUILD_WITH_CYTHON', False) # Environment variable to determine whether or not to enable coverage analysis # in Cython modules. ENABLE_CYTHON_TRACING = os.environ.get( 'GRPC_PYTHON_ENABLE_CYTHON_TRACING', False) # There are some situations (like on Windows) where CC, CFLAGS, and LDFLAGS are # entirely ignored/dropped/forgotten by distutils and its Cygwin/MinGW support. # We use these environment variables to thus get around that without locking # ourselves in w.r.t. 
the multitude of operating systems this ought to build on. # We can also use these variables as a way to inject environment-specific # compiler/linker flags. We assume GCC-like compilers and/or MinGW as a # reasonable default. EXTRA_ENV_COMPILE_ARGS = os.environ.get('GRPC_PYTHON_CFLAGS', None) EXTRA_ENV_LINK_ARGS = os.environ.get('GRPC_PYTHON_LDFLAGS', None) if EXTRA_ENV_COMPILE_ARGS is None: EXTRA_ENV_COMPILE_ARGS = '' if 'win32' in sys.platform and sys.version_info < (3, 5): # We use define flags here and don't directly add to DEFINE_MACROS below to # ensure that the expert user/builder has a way of turning it off (via the # envvars) without adding yet more GRPC-specific envvars. # See https://sourceforge.net/p/mingw-w64/bugs/363/ if '32' in platform.architecture()[0]: EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime32 -D_timeb=__timeb32 -D_ftime_s=_ftime32_s' else: EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime64 -D_timeb=__timeb64' elif "linux" in sys.platform or "darwin" in sys.platform: EXTRA_ENV_COMPILE_ARGS += ' -fvisibility=hidden -fno-wrapv' if EXTRA_ENV_LINK_ARGS is None: EXTRA_ENV_LINK_ARGS = '' if "linux" in sys.platform or "darwin" in sys.platform: EXTRA_ENV_LINK_ARGS += ' -lpthread' elif "win32" in sys.platform and sys.version_info < (3, 5): msvcr = cygwinccompiler.get_msvcr()[0] # TODO(atash) sift through the GCC specs to see if libstdc++ can have any # influence on the linkage outcome on MinGW for non-C++ programs. EXTRA_ENV_LINK_ARGS += ( ' -static-libgcc -static-libstdc++ -mcrtdll={msvcr} ' '-static'.format(msvcr=msvcr)) if "linux" in sys.platform: EXTRA_ENV_LINK_ARGS += ' -Wl,-wrap,memcpy' EXTRA_COMPILE_ARGS = shlex.split(EXTRA_ENV_COMPILE_ARGS) EXTRA_LINK_ARGS = shlex.split(EXTRA_ENV_LINK_ARGS) CYTHON_EXTENSION_PACKAGE_NAMES = () CYTHON_EXTENSION_MODULE_NAMES = ('grpc._cython.cygrpc',) CYTHON_HELPER_C_FILES = () CORE_C_FILES = tuple(grpc_core_dependencies.CORE_SOURCE_FILES) EXTENSION_INCLUDE_DIRECTORIES = ( (PYTHON_STEM,) + CORE_INCLUDE + BORINGSSL_INCLUDE + ZLIB_INCLUDE) EXTENSION_LIBRARIES = () if "linux" in sys.platform: EXTENSION_LIBRARIES += ('rt',) if not "win32" in sys.platform: EXTENSION_LIBRARIES += ('m',) if "win32" in sys.platform: EXTENSION_LIBRARIES += ('advapi32', 'ws2_32',) DEFINE_MACROS = ( ('OPENSSL_NO_ASM', 1), ('_WIN32_WINNT', 0x600), ('GPR_BACKWARDS_COMPATIBILITY_MODE', 1),) if "win32" in sys.platform: DEFINE_MACROS += (('WIN32_LEAN_AND_MEAN', 1),) if '64bit' in platform.architecture()[0]: DEFINE_MACROS += (('MS_WIN64', 1),) elif sys.version_info >= (3, 5): # For some reason, this is needed to get access to inet_pton/inet_ntop # on msvc, but only for 32 bits DEFINE_MACROS += (('NTDDI_VERSION', 0x06000000),) LDFLAGS = tuple(EXTRA_LINK_ARGS) CFLAGS = tuple(EXTRA_COMPILE_ARGS) if "linux" in sys.platform or "darwin" in sys.platform: pymodinit_type = 'PyObject*' if PY3 else 'void' pymodinit = '__attribute__((visibility ("default"))) {}'.format(pymodinit_type) DEFINE_MACROS += (('PyMODINIT_FUNC', pymodinit),) # By default, Python3 distutils enforces compatibility of # c plugins (.so files) with the OSX version Python3 was built with. 
# For Python3.4, this is OSX 10.6, but we need Thread Local Support (__thread) if 'darwin' in sys.platform and PY3: mac_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') if mac_target and (pkg_resources.parse_version(mac_target) < pkg_resources.parse_version('10.7.0')): os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.7' os.environ['_PYTHON_HOST_PLATFORM'] = re.sub( r'macosx-[0-9]+\.[0-9]+-(.+)', r'macosx-10.7-\1', util.get_platform()) def cython_extensions_and_necessity(): cython_module_files = [os.path.join(PYTHON_STEM, name.replace('.', '/') + '.pyx') for name in CYTHON_EXTENSION_MODULE_NAMES] extensions = [ _extension.Extension( name=module_name, sources=[module_file] + list(CYTHON_HELPER_C_FILES) + list(CORE_C_FILES), include_dirs=list(EXTENSION_INCLUDE_DIRECTORIES), libraries=list(EXTENSION_LIBRARIES), define_macros=list(DEFINE_MACROS), extra_compile_args=list(CFLAGS), extra_link_args=list(LDFLAGS), ) for (module_name, module_file) in zip(list(CYTHON_EXTENSION_MODULE_NAMES), cython_module_files) ] need_cython = BUILD_WITH_CYTHON if not BUILD_WITH_CYTHON: need_cython = need_cython or not commands.check_and_update_cythonization(extensions) return commands.try_cythonize(extensions, linetracing=ENABLE_CYTHON_TRACING, mandatory=BUILD_WITH_CYTHON), need_cython CYTHON_EXTENSION_MODULES, need_cython = cython_extensions_and_necessity() PACKAGE_DIRECTORIES = { '': PYTHON_STEM, } INSTALL_REQUIRES = ( 'six>=1.5.2', 'enum34>=1.0.4', 'futures>=2.2.0', # TODO(atash): eventually split the grpcio package into a metapackage # depending on protobuf and the runtime component (independent of protobuf) 'protobuf>=3.0.0', ) SETUP_REQUIRES = INSTALL_REQUIRES + ( 'sphinx>=1.3', 'sphinx_rtd_theme>=0.1.8', 'six>=1.10', ) if BUILD_WITH_CYTHON: sys.stderr.write( "You requested a Cython build via GRPC_PYTHON_BUILD_WITH_CYTHON, " "but do not have Cython installed. We won't stop you from using " "other commands, but the extension files will fail to build.\n") elif need_cython: sys.stderr.write( 'We could not find Cython. Setup may take 10-20 minutes.\n') SETUP_REQUIRES += ('cython>=0.23',) COMMAND_CLASS = { 'doc': commands.SphinxDocumentation, 'build_project_metadata': commands.BuildProjectMetadata, 'build_py': commands.BuildPy, 'build_ext': commands.BuildExt, 'gather': commands.Gather, } # Ensure that package data is copied over before any commands have been run: credentials_dir = os.path.join(PYTHON_STEM, 'grpc', '_cython', '_credentials') try: os.mkdir(credentials_dir) except OSError: pass shutil.copyfile(os.path.join('etc', 'roots.pem'), os.path.join(credentials_dir, 'roots.pem')) PACKAGE_DATA = { # Binaries that may or may not be present in the final installation, but are # mentioned here for completeness. 'grpc._cython': [ '_credentials/roots.pem', '_windows/grpc_c.32.python', '_windows/grpc_c.64.python', ], } PACKAGES = setuptools.find_packages(PYTHON_STEM) setuptools.setup( name='grpcio', version=grpc_version.VERSION, license=LICENSE, ext_modules=CYTHON_EXTENSION_MODULES,
packages=list(PACKAGES), package_dir=PACKAGE_DIRECTORIES, package_data=PACKAGE_DATA, install_requires=INSTALL_REQUIRES, setup_requires=SETUP_REQUIRES, cmdclass=COMMAND_CLASS, )
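A hedged sketch of how the GRPC_PYTHON_* environment variables consulted near the top of this setup.py might be supplied. The build_ext invocation is an assumption about typical usage, not something the script itself documents.

# Hypothetical build driver: sets the env vars this setup.py consults, then
# invokes the extension build. The exact command line is an assumption.
import os
import subprocess
import sys

env = dict(os.environ)
env["GRPC_PYTHON_BUILD_WITH_CYTHON"] = "1"   # regenerate C sources from .pyx
env["GRPC_PYTHON_CFLAGS"] = "-O2"            # extra compiler flags
subprocess.check_call(
    [sys.executable, "setup.py", "build_ext", "--inplace"], env=env)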
mod.rs
// ignore-tidy-filelength //! Rustdoc's HTML rendering module. //! //! This module contains the bulk of the logic necessary for rendering a //! rustdoc `clean::Crate` instance to a set of static HTML pages. This //! rendering process is largely driven by the `format!` syntax extension to //! perform all I/O into files and streams. //! //! The rendering process is largely driven by the `Context` and `Cache` //! structures. The cache is pre-populated by crawling the crate in question, //! and then it is shared among the various rendering threads. The cache is meant //! to be a fairly large structure not implementing `Clone` (because it's shared //! among threads). The context, however, should be a lightweight structure. This //! is cloned per-thread and contains information about what is currently being //! rendered. //! //! In order to speed up rendering (mostly because of markdown rendering), the //! rendering process has been parallelized. This parallelization is only //! exposed through the `crate` method on the context, and then also from the //! fact that the shared cache is stored in TLS (and must be accessed as such). //! //! In addition to rendering the crate itself, this module is also responsible //! for creating the corresponding search index and source file renderings. //! These threads are not parallelized (they haven't been a bottleneck yet), and //! both occur before the crate is rendered. crate mod cache; #[cfg(test)] mod tests; use std::cell::{Cell, RefCell}; use std::cmp::Ordering; use std::collections::{BTreeMap, VecDeque}; use std::default::Default; use std::ffi::OsStr; use std::fmt::{self, Write}; use std::fs::{self, File}; use std::io::prelude::*; use std::io::{self, BufReader}; use std::path::{Component, Path, PathBuf}; use std::rc::Rc; use std::str; use std::string::ToString; use std::sync::mpsc::{channel, Receiver}; use std::sync::Arc; use itertools::Itertools; use rustc_ast_pretty::pprust; use rustc_attr::{Deprecation, StabilityLevel}; use rustc_data_structures::flock; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_hir as hir; use rustc_hir::def_id::{DefId, LOCAL_CRATE}; use rustc_hir::Mutability; use rustc_middle::middle::stability; use rustc_middle::ty; use rustc_middle::ty::TyCtxt; use rustc_session::Session; use rustc_span::edition::Edition; use rustc_span::hygiene::MacroKind; use rustc_span::source_map::FileName; use rustc_span::symbol::{kw, sym, Symbol}; use serde::ser::SerializeSeq; use serde::{Serialize, Serializer}; use crate::clean::{self, AttributesExt, GetDefId, RenderedLink, SelfTy, TypeKind}; use crate::config::{RenderInfo, RenderOptions}; use crate::docfs::{DocFS, PathError}; use crate::doctree; use crate::error::Error; use crate::formats::cache::{cache, Cache}; use crate::formats::item_type::ItemType; use crate::formats::{AssocItemRender, FormatRenderer, Impl, RenderMode}; use crate::html::escape::Escape; use crate::html::format::fmt_impl_for_trait_page; use crate::html::format::Function; use crate::html::format::{href, print_default_space, print_generic_bounds, WhereClause}; use crate::html::format::{print_abi_with_space, Buffer, PrintWithSpace}; use crate::html::markdown::{ self, plain_text_summary, ErrorCodes, IdMap, Markdown, MarkdownHtml, MarkdownSummaryLine, }; use crate::html::sources; use crate::html::{highlight, layout, static_files}; use cache::{build_index, ExternalLocation}; /// A pair of name and its optional document.
crate type NameDoc = (String, Option<String>); crate fn ensure_trailing_slash(v: &str) -> impl fmt::Display + '_ { crate::html::format::display_fn(move |f| { if !v.ends_with('/') && !v.is_empty() { write!(f, "{}/", v) } else { write!(f, "{}", v) } }) } /// Major driving force in all rustdoc rendering. This contains information /// about where in the tree-like hierarchy rendering is occurring and controls /// how the current page is being rendered. /// /// It is intended that this context is a lightweight object which can be fairly /// easily cloned because it is cloned per work-job (about once per item in the /// rustdoc tree). #[derive(Clone)] crate struct Context<'tcx> { /// Current hierarchy of components leading down to what's currently being /// rendered crate current: Vec<String>, /// The current destination folder of where HTML artifacts should be placed. /// This changes as the context descends into the module hierarchy. crate dst: PathBuf, /// A flag, which when `true`, will render pages which redirect to the /// real location of an item. This is used to allow external links to /// publicly reused items to redirect to the right location. crate render_redirect_pages: bool, /// The map used to ensure all generated 'id=' attributes are unique. id_map: Rc<RefCell<IdMap>>, crate shared: Arc<SharedContext<'tcx>>, all: Rc<RefCell<AllTypes>>, /// Storage for the errors produced while generating documentation so they /// can be printed together at the end. crate errors: Rc<Receiver<String>>, } crate struct SharedContext<'tcx> { crate tcx: TyCtxt<'tcx>, /// The path to the crate root source minus the file name. /// Used for simplifying paths to the highlighted source code files. crate src_root: PathBuf, /// This describes the layout of each page, and is not modified after /// creation of the context (contains info like the favicon and added html). crate layout: layout::Layout, /// This flag indicates whether `[src]` links should be generated or not. If /// the source files are present in the html rendering, then this will be /// `true`. crate include_sources: bool, /// The local file sources we've emitted and their respective url-paths. crate local_sources: FxHashMap<PathBuf, String>, /// Whether the collapsed pass ran crate collapsed: bool, /// The base-URL of the issue tracker for when an item has been tagged with /// an issue number. crate issue_tracker_base_url: Option<String>, /// The directories that have already been created in this doc run. Used to reduce the number /// of spurious `create_dir_all` calls. crate created_dirs: RefCell<FxHashSet<PathBuf>>, /// This flag indicates whether listings of modules (in the side bar and documentation itself) /// should be ordered alphabetically or in order of appearance (in the source code). crate sort_modules_alphabetically: bool, /// Additional CSS files to be added to the generated docs. crate style_files: Vec<StylePath>, /// Suffix to be added on resource files (if suffix is "-v2" then "light.css" becomes /// "light-v2.css"). crate resource_suffix: String, /// Optional path string to be used to load static files on output pages. If not set, uses /// combinations of `../` to reach the documentation root. crate static_root_path: Option<String>, /// The fs handle we are working with. crate fs: DocFS, /// The default edition used to parse doctests. 
crate edition: Edition, crate codes: ErrorCodes, playground: Option<markdown::Playground>, } impl<'tcx> Context<'tcx> { fn path(&self, filename: &str) -> PathBuf { // We use splitn vs Path::extension here because we might get a filename // like `style.min.css` and we want to process that into // `style-suffix.min.css`. Path::extension would just return `css` // which would result in `style.min-suffix.css` which isn't what we // want. let (base, ext) = filename.split_once('.').unwrap(); let filename = format!("{}{}.{}", base, self.shared.resource_suffix, ext); self.dst.join(&filename) } fn tcx(&self) -> TyCtxt<'tcx> { self.shared.tcx } fn sess(&self) -> &Session { &self.shared.tcx.sess } } impl SharedContext<'_> { crate fn ensure_dir(&self, dst: &Path) -> Result<(), Error> { let mut dirs = self.created_dirs.borrow_mut(); if !dirs.contains(dst) { try_err!(self.fs.create_dir_all(dst), dst); dirs.insert(dst.to_path_buf()); } Ok(()) } /// Based on whether the `collapse-docs` pass was run, return either the `doc_value` or the /// `collapsed_doc_value` of the given item. crate fn maybe_collapsed_doc_value<'a>(&self, item: &'a clean::Item) -> Option<String> { if self.collapsed { item.collapsed_doc_value() } else { item.doc_value() } } } // Helper structs for rendering items/sidebars and carrying along contextual // information /// Struct representing one entry in the JS search index. These are all emitted /// by hand to a large JS file at the end of cache-creation. #[derive(Debug)] crate struct IndexItem { crate ty: ItemType, crate name: String, crate path: String, crate desc: String, crate parent: Option<DefId>, crate parent_idx: Option<usize>, crate search_type: Option<IndexItemFunctionType>, } impl Serialize for IndexItem { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { assert_eq!( self.parent.is_some(), self.parent_idx.is_some(), "`{}` is missing idx", self.name ); (self.ty, &self.name, &self.path, &self.desc, self.parent_idx, &self.search_type) .serialize(serializer) } } /// A type used for the search index. #[derive(Debug)] crate struct RenderType { ty: Option<DefId>, idx: Option<usize>, name: Option<String>, generics: Option<Vec<Generic>>, } impl Serialize for RenderType { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { if let Some(name) = &self.name { let mut seq = serializer.serialize_seq(None)?; if let Some(id) = self.idx { seq.serialize_element(&id)?; } else { seq.serialize_element(&name)?; } if let Some(generics) = &self.generics { seq.serialize_element(&generics)?; } seq.end() } else { serializer.serialize_none() } } } /// A type used for the search index. #[derive(Debug)] crate struct Generic { name: String, defid: Option<DefId>, idx: Option<usize>, } impl Serialize for Generic { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { if let Some(id) = self.idx { serializer.serialize_some(&id) } else { serializer.serialize_some(&self.name) } } } /// Full type of functions/methods in the search index. #[derive(Debug)] crate struct IndexItemFunctionType { inputs: Vec<TypeWithKind>, output: Option<Vec<TypeWithKind>>, } impl Serialize for IndexItemFunctionType { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { // If we couldn't figure out a type, just write `null`. 
let mut iter = self.inputs.iter(); if match self.output { Some(ref output) => iter.chain(output.iter()).any(|ref i| i.ty.name.is_none()), None => iter.any(|ref i| i.ty.name.is_none()), } { serializer.serialize_none() } else { let mut seq = serializer.serialize_seq(None)?; seq.serialize_element(&self.inputs)?; if let Some(output) = &self.output { if output.len() > 1 { seq.serialize_element(&output)?; } else { seq.serialize_element(&output[0])?; } } seq.end() } } } #[derive(Debug)] crate struct TypeWithKind { ty: RenderType, kind: TypeKind, } impl From<(RenderType, TypeKind)> for TypeWithKind { fn from(x: (RenderType, TypeKind)) -> TypeWithKind { TypeWithKind { ty: x.0, kind: x.1 } } } impl Serialize for TypeWithKind { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { let mut seq = serializer.serialize_seq(None)?; seq.serialize_element(&self.ty.name)?; let x: ItemType = self.kind.into(); seq.serialize_element(&x)?; seq.end() } } #[derive(Debug, Clone)] crate struct StylePath { /// The path to the theme crate path: PathBuf, /// What the `disabled` attribute should be set to in the HTML tag crate disabled: bool, } thread_local!(crate static CURRENT_DEPTH: Cell<usize> = Cell::new(0)); crate fn initial_ids() -> Vec<String> { [ "main", "search", "help", "TOC", "render-detail", "associated-types", "associated-const", "required-methods", "provided-methods", "implementors", "synthetic-implementors", "implementors-list", "synthetic-implementors-list", "methods", "deref-methods", "implementations", ] .iter() .map(|id| (String::from(*id))) .collect() } /// Generates the documentation for `crate` into the directory `dst` impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> { fn init( mut krate: clean::Crate, options: RenderOptions, _render_info: RenderInfo, edition: Edition, cache: &mut Cache, tcx: ty::TyCtxt<'tcx>, ) -> Result<(Self, clean::Crate), Error> { // need to save a copy of the options for rendering the index page let md_opts = options.clone(); let RenderOptions { output, external_html, id_map, playground_url, sort_modules_alphabetically, themes: style_files, default_settings, extension_css, resource_suffix, static_root_path, generate_search_filter, unstable_features, .. 
} = options; let src_root = match krate.src { FileName::Real(ref p) => match p.local_path().parent() { Some(p) => p.to_path_buf(), None => PathBuf::new(), }, _ => PathBuf::new(), }; // If user passed in `--playground-url` arg, we fill in crate name here let mut playground = None; if let Some(url) = playground_url { playground = Some(markdown::Playground { crate_name: Some(krate.name.to_string()), url }); } let mut layout = layout::Layout { logo: String::new(), favicon: String::new(), external_html, default_settings, krate: krate.name.to_string(), css_file_extension: extension_css, generate_search_filter, }; let mut issue_tracker_base_url = None; let mut include_sources = true; // Crawl the crate attributes looking for attributes which control how we're // going to emit HTML if let Some(attrs) = krate.module.as_ref().map(|m| &m.attrs) { for attr in attrs.lists(sym::doc) { match (attr.name_or_empty(), attr.value_str()) { (sym::html_favicon_url, Some(s)) => { layout.favicon = s.to_string(); } (sym::html_logo_url, Some(s)) => { layout.logo = s.to_string(); } (sym::html_playground_url, Some(s)) => { playground = Some(markdown::Playground { crate_name: Some(krate.name.to_string()), url: s.to_string(), }); } (sym::issue_tracker_base_url, Some(s)) => { issue_tracker_base_url = Some(s.to_string()); } (sym::html_no_source, None) if attr.is_word() => { include_sources = false; } _ => {} } } } let (sender, receiver) = channel(); let mut scx = SharedContext { tcx, collapsed: krate.collapsed, src_root, include_sources, local_sources: Default::default(), issue_tracker_base_url, layout, created_dirs: Default::default(), sort_modules_alphabetically, style_files, resource_suffix, static_root_path, fs: DocFS::new(sender), edition, codes: ErrorCodes::from(unstable_features.is_nightly_build()), playground, }; // Add the default themes to the `Vec` of stylepaths // // Note that these must be added before `sources::render` is called // so that the resulting source pages are styled // // `light.css` is not disabled because it is the stylesheet that stays loaded // by the browser as the theme stylesheet. The theme system (hackily) works by // changing the href to this stylesheet. All other themes are disabled to // prevent rule conflicts scx.style_files.push(StylePath { path: PathBuf::from("light.css"), disabled: false }); scx.style_files.push(StylePath { path: PathBuf::from("dark.css"), disabled: true }); scx.style_files.push(StylePath { path: PathBuf::from("ayu.css"), disabled: true }); let dst = output; scx.ensure_dir(&dst)?; krate = sources::render(&dst, &mut scx, krate)?; // Build our search index let index = build_index(&krate, cache); let cache = Arc::new(cache); let mut cx = Context { current: Vec::new(), dst, render_redirect_pages: false, id_map: Rc::new(RefCell::new(id_map)), shared: Arc::new(scx), all: Rc::new(RefCell::new(AllTypes::new())), errors: Rc::new(receiver), }; CURRENT_DEPTH.with(|s| s.set(0)); // Write shared runs within a flock; disable thread dispatching of IO temporarily. 
Arc::get_mut(&mut cx.shared).unwrap().fs.set_sync_only(true); write_shared(&cx, &krate, index, &md_opts, &cache)?; Arc::get_mut(&mut cx.shared).unwrap().fs.set_sync_only(false); Ok((cx, krate)) } fn after_run(&mut self, diag: &rustc_errors::Handler) -> Result<(), Error> { Arc::get_mut(&mut self.shared).unwrap().fs.close(); let nb_errors = self.errors.iter().map(|err| diag.struct_err(&err).emit()).count(); if nb_errors > 0 { Err(Error::new(io::Error::new(io::ErrorKind::Other, "I/O error"), "")) } else { Ok(()) } } fn after_krate(&mut self, krate: &clean::Crate, cache: &Cache) -> Result<(), Error> { let final_file = self.dst.join(&*krate.name.as_str()).join("all.html"); let settings_file = self.dst.join("settings.html"); let crate_name = krate.name; let mut root_path = self.dst.to_str().expect("invalid path").to_owned(); if !root_path.ends_with('/') { root_path.push('/'); } let mut page = layout::Page { title: "List of all items in this crate", css_class: "mod", root_path: "../", static_root_path: self.shared.static_root_path.as_deref(), description: "List of all items in this crate", keywords: BASIC_KEYWORDS, resource_suffix: &self.shared.resource_suffix, extra_scripts: &[], static_extra_scripts: &[], }; let sidebar = if let Some(ref version) = cache.crate_version { format!( "<p class=\"location\">Crate {}</p>\ <div class=\"block version\">\ <p>Version {}</p>\ </div>\ <a id=\"all-types\" href=\"index.html\"><p>Back to index</p></a>", crate_name, Escape(version), ) } else { String::new() }; let all = self.all.replace(AllTypes::new()); let v = layout::render( &self.shared.layout, &page, sidebar, |buf: &mut Buffer| all.print(buf), &self.shared.style_files, ); self.shared.fs.write(&final_file, v.as_bytes())?; // Generating settings page. page.title = "Rustdoc settings"; page.description = "Settings of Rustdoc"; page.root_path = "./"; let mut style_files = self.shared.style_files.clone(); let sidebar = "<p class=\"location\">Settings</p><div class=\"sidebar-elems\"></div>"; style_files.push(StylePath { path: PathBuf::from("settings.css"), disabled: false }); let v = layout::render( &self.shared.layout, &page, sidebar, settings( self.shared.static_root_path.as_deref().unwrap_or("./"), &self.shared.resource_suffix, &self.shared.style_files, )?, &style_files, ); self.shared.fs.write(&settings_file, v.as_bytes())?; Ok(()) } fn mod_item_in( &mut self, item: &clean::Item, item_name: &str, cache: &Cache, ) -> Result<(), Error> { // Stripped modules survive the rustdoc passes (i.e., `strip-private`) // if they contain impls for public types. These modules can also // contain items such as publicly re-exported structures. // // External crates will provide links to these structures, so // these modules are recursed into, but not rendered normally // (a flag on the context). if !self.render_redirect_pages { self.render_redirect_pages = item.is_stripped(); } let scx = &self.shared; self.dst.push(item_name); self.current.push(item_name.to_owned()); info!("Recursing into {}", self.dst.display()); let buf = self.render_item(item, false, cache); // buf will be empty if the module is stripped and there is no redirect for it if !buf.is_empty() { self.shared.ensure_dir(&self.dst)?; let joint_dst = self.dst.join("index.html"); scx.fs.write(&joint_dst, buf.as_bytes())?; } // Render sidebar-items.js used throughout this module. 
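// The payload is the map built by `build_sidebar_items` below: item-type short
// name -> list of (item name, one-line doc summary) pairs. A hypothetical module
// with one struct and one function would serialize to roughly:
//
//     initSidebarItems({"fn":[["bar","Does a thing."]],
//                       "struct":[["Foo","A thing."]]});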
if !self.render_redirect_pages { let module = match *item.kind { clean::StrippedItem(box clean::ModuleItem(ref m)) | clean::ModuleItem(ref m) => m, _ => unreachable!(), }; let items = self.build_sidebar_items(module); let js_dst = self.dst.join("sidebar-items.js"); let v = format!("initSidebarItems({});", serde_json::to_string(&items).unwrap()); scx.fs.write(&js_dst, &v)?; } Ok(()) } fn mod_item_out(&mut self, _item_name: &str) -> Result<(), Error> { info!("Recursed; leaving {}", self.dst.display()); // Go back to where we were self.dst.pop(); self.current.pop(); Ok(()) } fn item(&mut self, item: clean::Item, cache: &Cache) -> Result<(), Error> { // Stripped modules survive the rustdoc passes (i.e., `strip-private`) // if they contain impls for public types. These modules can also // contain items such as publicly re-exported structures. // // External crates will provide links to these structures, so // these modules are recursed into, but not rendered normally // (a flag on the context). if !self.render_redirect_pages { self.render_redirect_pages = item.is_stripped(); } let buf = self.render_item(&item, true, cache); // buf will be empty if the item is stripped and there is no redirect for it if !buf.is_empty() { let name = item.name.as_ref().unwrap(); let item_type = item.type_(); let file_name = &item_path(item_type, &name.as_str()); self.shared.ensure_dir(&self.dst)?; let joint_dst = self.dst.join(file_name); self.shared.fs.write(&joint_dst, buf.as_bytes())?; if !self.render_redirect_pages { self.all.borrow_mut().append(full_path(self, &item), &item_type); } // If the item is a macro, redirect from the old macro URL (with !) // to the new one (without). if item_type == ItemType::Macro { let redir_name = format!("{}.{}!.html", item_type, name); let redir_dst = self.dst.join(redir_name); let v = layout::redirect(file_name); self.shared.fs.write(&redir_dst, v.as_bytes())?; } } Ok(()) } } fn write_shared( cx: &Context<'_>, krate: &clean::Crate, search_index: String, options: &RenderOptions, cache: &Cache, ) -> Result<(), Error> { // Write out the shared files. Note that these are shared among all rustdoc // docs placed in the output directory, so this needs to be a synchronized // operation with respect to all other rustdocs running around. let lock_file = cx.dst.join(".lock"); let _lock = try_err!(flock::Lock::new(&lock_file, true, true, true), &lock_file); // Add all the static files. These may already exist, but we just // overwrite them anyway to make sure that they're fresh and up-to-date. write_minify( &cx.shared.fs, cx.path("rustdoc.css"), static_files::RUSTDOC_CSS, options.enable_minification, )?; write_minify( &cx.shared.fs, cx.path("settings.css"), static_files::SETTINGS_CSS, options.enable_minification, )?; write_minify( &cx.shared.fs, cx.path("noscript.css"), static_files::NOSCRIPT_CSS, options.enable_minification, )?; // To keep "light.css" from being overwritten, we first run over the received themes and only // then over the "official" styles.
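// For example (illustrative): a user theme passed as `--theme /tmp/coal.css`
// shows up in `style_files`, misses the three official arms below, and is
// copied verbatim into the output directory under its own name (`coal.css`),
// while `light`, `dark` and `ayu` are re-emitted from the bundled sources.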
let mut themes: FxHashSet<String> = FxHashSet::default(); for entry in &cx.shared.style_files { let theme = try_none!(try_none!(entry.path.file_stem(), &entry.path).to_str(), &entry.path); let extension = try_none!(try_none!(entry.path.extension(), &entry.path).to_str(), &entry.path); // Handle the official themes match theme { "light" => write_minify( &cx.shared.fs, cx.path("light.css"), static_files::themes::LIGHT, options.enable_minification, )?, "dark" => write_minify( &cx.shared.fs, cx.path("dark.css"), static_files::themes::DARK, options.enable_minification, )?, "ayu" => write_minify( &cx.shared.fs, cx.path("ayu.css"), static_files::themes::AYU, options.enable_minification, )?, _ => { // Handle added third-party themes let content = try_err!(fs::read(&entry.path), &entry.path); cx.shared .fs .write(cx.path(&format!("{}.{}", theme, extension)), content.as_slice())?; } }; themes.insert(theme.to_owned()); } let write = |p, c| cx.shared.fs.write(p, c); if (*cx.shared).layout.logo.is_empty() { write(cx.path("rust-logo.png"), static_files::RUST_LOGO)?; } if (*cx.shared).layout.favicon.is_empty() { write(cx.path("favicon.svg"), static_files::RUST_FAVICON_SVG)?; write(cx.path("favicon-16x16.png"), static_files::RUST_FAVICON_PNG_16)?; write(cx.path("favicon-32x32.png"), static_files::RUST_FAVICON_PNG_32)?; } write(cx.path("brush.svg"), static_files::BRUSH_SVG)?; write(cx.path("wheel.svg"), static_files::WHEEL_SVG)?; write(cx.path("down-arrow.svg"), static_files::DOWN_ARROW_SVG)?; let mut themes: Vec<&String> = themes.iter().collect(); themes.sort(); // To avoid theme switch latencies as much as possible, we put everything theme related // at the beginning of the html files into another js file. let theme_js = format!( r#"var themes = document.getElementById("theme-choices"); var themePicker = document.getElementById("theme-picker"); function showThemeButtonState() {{ themes.style.display = "block"; themePicker.style.borderBottomRightRadius = "0"; themePicker.style.borderBottomLeftRadius = "0"; }} function hideThemeButtonState() {{ themes.style.display = "none"; themePicker.style.borderBottomRightRadius = "3px"; themePicker.style.borderBottomLeftRadius = "3px"; }} function switchThemeButtonState() {{ if (themes.style.display === "block") {{ hideThemeButtonState(); }} else {{ showThemeButtonState(); }} }}; function handleThemeButtonsBlur(e) {{ var active = document.activeElement; var related = e.relatedTarget; if (active.id !== "theme-picker" && (!active.parentNode || active.parentNode.id !== "theme-choices") && (!related || (related.id !== "theme-picker" && (!related.parentNode || related.parentNode.id !== "theme-choices")))) {{ hideThemeButtonState(); }} }} themePicker.onclick = switchThemeButtonState; themePicker.onblur = handleThemeButtonsBlur; {}.forEach(function(item) {{ var but = document.createElement("button"); but.textContent = item; but.onclick = function(el) {{ switchTheme(currentTheme, mainTheme, item, true); useSystemTheme(false); }}; but.onblur = handleThemeButtonsBlur; themes.appendChild(but); }});"#, serde_json::to_string(&themes).unwrap() ); write_minify(&cx.shared.fs, cx.path("theme.js"), &theme_js, options.enable_minification)?; write_minify( &cx.shared.fs, cx.path("main.js"), static_files::MAIN_JS, options.enable_minification, )?; write_minify( &cx.shared.fs, cx.path("settings.js"), static_files::SETTINGS_JS, options.enable_minification, )?; if cx.shared.include_sources { write_minify( &cx.shared.fs, cx.path("source-script.js"), static_files::sidebar::SOURCE_SCRIPT, 
options.enable_minification, )?; } { write_minify( &cx.shared.fs, cx.path("storage.js"), &format!( "var resourcesSuffix = \"{}\";{}", cx.shared.resource_suffix, static_files::STORAGE_JS ), options.enable_minification, )?; } if let Some(ref css) = cx.shared.layout.css_file_extension { let out = cx.path("theme.css"); let buffer = try_err!(fs::read_to_string(css), css); if !options.enable_minification { cx.shared.fs.write(&out, &buffer)?; } else { write_minify(&cx.shared.fs, out, &buffer, options.enable_minification)?; } } write_minify( &cx.shared.fs, cx.path("normalize.css"), static_files::NORMALIZE_CSS, options.enable_minification, )?; write(cx.dst.join("FiraSans-Regular.woff"), static_files::fira_sans::REGULAR)?; write(cx.dst.join("FiraSans-Medium.woff"), static_files::fira_sans::MEDIUM)?; write(cx.dst.join("FiraSans-LICENSE.txt"), static_files::fira_sans::LICENSE)?; write(cx.dst.join("SourceSerifPro-Regular.ttf.woff"), static_files::source_serif_pro::REGULAR)?; write(cx.dst.join("SourceSerifPro-Bold.ttf.woff"), static_files::source_serif_pro::BOLD)?; write(cx.dst.join("SourceSerifPro-It.ttf.woff"), static_files::source_serif_pro::ITALIC)?; write(cx.dst.join("SourceSerifPro-LICENSE.md"), static_files::source_serif_pro::LICENSE)?; write(cx.dst.join("SourceCodePro-Regular.woff"), static_files::source_code_pro::REGULAR)?; write(cx.dst.join("SourceCodePro-Semibold.woff"), static_files::source_code_pro::SEMIBOLD)?; write(cx.dst.join("SourceCodePro-LICENSE.txt"), static_files::source_code_pro::LICENSE)?; write(cx.dst.join("LICENSE-MIT.txt"), static_files::LICENSE_MIT)?; write(cx.dst.join("LICENSE-APACHE.txt"), static_files::LICENSE_APACHE)?; write(cx.dst.join("COPYRIGHT.txt"), static_files::COPYRIGHT)?; fn collect(path: &Path, krate: &str, key: &str) -> io::Result<(Vec<String>, Vec<String>)> { let mut ret = Vec::new(); let mut krates = Vec::new(); if path.exists() { for line in BufReader::new(File::open(path)?).lines() { let line = line?; if !line.starts_with(key) { continue; } if line.starts_with(&format!(r#"{}["{}"]"#, key, krate)) { continue; } ret.push(line.to_string()); krates.push( line[key.len() + 2..] 
.split('"') .next() .map(|s| s.to_owned()) .unwrap_or_else(String::new), ); } } Ok((ret, krates)) } fn collect_json(path: &Path, krate: &str) -> io::Result<(Vec<String>, Vec<String>)> { let mut ret = Vec::new(); let mut krates = Vec::new(); if path.exists() { for line in BufReader::new(File::open(path)?).lines() { let line = line?; if !line.starts_with('"') { continue; } if line.starts_with(&format!("\"{}\"", krate)) { continue; } if line.ends_with(",\\") { ret.push(line[..line.len() - 2].to_string()); } else { // Ends with "\\" (it's the case for the last added crate line) ret.push(line[..line.len() - 1].to_string()); } krates.push( line.split('"') .find(|s| !s.is_empty()) .map(|s| s.to_owned()) .unwrap_or_else(String::new), ); } } Ok((ret, krates)) } use std::ffi::OsString; #[derive(Debug)] struct Hierarchy { elem: OsString, children: FxHashMap<OsString, Hierarchy>, elems: FxHashSet<OsString>, } impl Hierarchy { fn new(elem: OsString) -> Hierarchy { Hierarchy { elem, children: FxHashMap::default(), elems: FxHashSet::default() } } fn to_json_string(&self) -> String { let mut subs: Vec<&Hierarchy> = self.children.values().collect(); subs.sort_unstable_by(|a, b| a.elem.cmp(&b.elem)); let mut files = self .elems .iter() .map(|s| format!("\"{}\"", s.to_str().expect("invalid osstring conversion"))) .collect::<Vec<_>>(); files.sort_unstable(); let subs = subs.iter().map(|s| s.to_json_string()).collect::<Vec<_>>().join(","); let dirs = if subs.is_empty() { String::new() } else { format!(",\"dirs\":[{}]", subs) }; let files = files.join(","); let files = if files.is_empty() { String::new() } else { format!(",\"files\":[{}]", files) }; format!( "{{\"name\":\"{name}\"{dirs}{files}}}", name = self.elem.to_str().expect("invalid osstring conversion"), dirs = dirs, files = files ) } } if cx.shared.include_sources { let mut hierarchy = Hierarchy::new(OsString::new()); for source in cx .shared .local_sources .iter() .filter_map(|p| p.0.strip_prefix(&cx.shared.src_root).ok()) { let mut h = &mut hierarchy; let mut elems = source .components() .filter_map(|s| match s { Component::Normal(s) => Some(s.to_owned()), _ => None, }) .peekable(); loop { let cur_elem = elems.next().expect("empty file path"); if elems.peek().is_none() { h.elems.insert(cur_elem); break; } else { let e = cur_elem.clone(); h.children.entry(cur_elem.clone()).or_insert_with(|| Hierarchy::new(e)); h = h.children.get_mut(&cur_elem).expect("not found child"); } } } let dst = cx.dst.join(&format!("source-files{}.js", cx.shared.resource_suffix)); let (mut all_sources, _krates) = try_err!(collect(&dst, &krate.name.as_str(), "sourcesIndex"), &dst); all_sources.push(format!( "sourcesIndex[\"{}\"] = {};", &krate.name, hierarchy.to_json_string() )); all_sources.sort(); let v = format!( "var N = null;var sourcesIndex = {{}};\n{}\ncreateSourceSidebar();\n", all_sources.join("\n") ); cx.shared.fs.write(&dst, v.as_bytes())?; } // Update the search index let dst = cx.dst.join(&format!("search-index{}.js", cx.shared.resource_suffix)); let (mut all_indexes, mut krates) = try_err!(collect_json(&dst, &krate.name.as_str()), &dst); all_indexes.push(search_index); // Sort the indexes by crate so the file will be generated identically even // with rustdoc running in parallel. all_indexes.sort(); { let mut v = String::from("var searchIndex = JSON.parse('{\\\n"); v.push_str(&all_indexes.join(",\\\n")); // "addSearchOptions" has to be called first so the crate filtering can be set before the // search might start (if it's set into the URL for example). 
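// For reference, the assembled file has this rough shape (hypothetical crates):
//
//     var searchIndex = JSON.parse('{\
//     "alpha":{...},\
//     "beta":{...}\
//     }');
//     addSearchOptions(searchIndex);initSearch(searchIndex);
//
// which is also why `collect_json` above strips the trailing `,\` and `\` line
// continuations when re-reading an existing index.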
v.push_str("\\\n}');\naddSearchOptions(searchIndex);initSearch(searchIndex);"); cx.shared.fs.write(&dst, &v)?; } if options.enable_index_page { if let Some(index_page) = options.index_page.clone() { let mut md_opts = options.clone(); md_opts.output = cx.dst.clone(); md_opts.external_html = (*cx.shared).layout.external_html.clone(); crate::markdown::render(&index_page, md_opts, cx.shared.edition) .map_err(|e| Error::new(e, &index_page))?; } else { let dst = cx.dst.join("index.html"); let page = layout::Page { title: "Index of crates", css_class: "mod", root_path: "./", static_root_path: cx.shared.static_root_path.as_deref(), description: "List of crates", keywords: BASIC_KEYWORDS, resource_suffix: &cx.shared.resource_suffix, extra_scripts: &[], static_extra_scripts: &[], }; krates.push(krate.name.to_string()); krates.sort(); krates.dedup(); let content = format!( "<h1 class=\"fqn\">\ <span class=\"in-band\">List of all crates</span>\ </h1><ul class=\"crate mod\">{}</ul>", krates .iter() .map(|s| { format!( "<li><a class=\"crate mod\" href=\"{}index.html\">{}</a></li>", ensure_trailing_slash(s), s ) }) .collect::<String>() ); let v = layout::render(&cx.shared.layout, &page, "", content, &cx.shared.style_files); cx.shared.fs.write(&dst, v.as_bytes())?; } } // Update the list of all implementors for traits let dst = cx.dst.join("implementors"); for (&did, imps) in &cache.implementors { // Private modules can leak through to this phase of rustdoc, which // could contain implementations for otherwise private types. In some // rare cases we could find an implementation for an item which wasn't // indexed, so we just skip this step in that case. // // FIXME: this is a vague explanation for why this can't be a `get`, in // theory it should be... let &(ref remote_path, remote_item_type) = match cache.paths.get(&did) { Some(p) => p, None => match cache.external_paths.get(&did) { Some(p) => p, None => continue, }, }; #[derive(Serialize)] struct Implementor { text: String, synthetic: bool, types: Vec<String>, } let implementors = imps .iter() .filter_map(|imp| { // If the trait and implementation are in the same crate, then // there's no need to emit information about it (there's inlining // going on). If they're in different crates then the crate defining // the trait will be interested in our implementation. // // If the implementation is from another crate then that crate // should add it. if imp.impl_item.def_id.krate == did.krate || !imp.impl_item.def_id.is_local() { None } else { Some(Implementor { text: imp.inner_impl().print().to_string(), synthetic: imp.inner_impl().synthetic, types: collect_paths_for_type(imp.inner_impl().for_.clone()), }) } }) .collect::<Vec<_>>(); // Only create a js file if we have impls to add to it. If the trait is // documented locally though we always create the file to avoid dead // links. if implementors.is_empty() && !cache.paths.contains_key(&did) { continue; } let implementors = format!( r#"implementors["{}"] = {};"#, krate.name, serde_json::to_string(&implementors).unwrap() ); let mut mydst = dst.clone(); for part in &remote_path[..remote_path.len() - 1] { mydst.push(part); } cx.shared.ensure_dir(&mydst)?; mydst.push(&format!("{}.{}.js", remote_item_type, remote_path[remote_path.len() - 1])); let (mut all_implementors, _) = try_err!(collect(&mydst, &krate.name.as_str(), "implementors"), &mydst); all_implementors.push(implementors); // Sort the implementors by crate so the file will be generated // identically even with rustdoc running in parallel. 
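// A hypothetical trait implemented in two downstream crates ends up with a
// file shaped roughly like:
//
//     (function() {var implementors = {};
//     implementors["alpha"] = [{"text":"impl Tr for A","synthetic":false,"types":["alpha::A"]}];
//     implementors["beta"] = [...];
//     if (window.register_implementors) { ... } else { ... }
//     })()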
all_implementors.sort(); let mut v = String::from("(function() {var implementors = {};\n"); for implementor in &all_implementors { writeln!(v, "{}", *implementor).unwrap(); } v.push_str( "if (window.register_implementors) {\ window.register_implementors(implementors);\ } else {\ window.pending_implementors = implementors;\ }", ); v.push_str("})()"); cx.shared.fs.write(&mydst, &v)?; } Ok(()) } fn write_minify( fs: &DocFS, dst: PathBuf, contents: &str, enable_minification: bool, ) -> Result<(), Error> { if enable_minification { if dst.extension() == Some(&OsStr::new("css")) { let res = try_none!(minifier::css::minify(contents).ok(), &dst); fs.write(dst, res.as_bytes()) } else { fs.write(dst, minifier::js::minify(contents).as_bytes()) } } else { fs.write(dst, contents.as_bytes()) } } fn write_srclink(cx: &Context<'_>, item: &clean::Item, buf: &mut Buffer, cache: &Cache) { if let Some(l) = cx.src_href(item, cache) { write!( buf, "<a class=\"srclink\" href=\"{}\" title=\"{}\">[src]</a>", l, "goto source code" ) } } #[derive(Debug, Eq, PartialEq, Hash)] struct ItemEntry { url: String, name: String, } impl ItemEntry { fn new(mut url: String, name: String) -> ItemEntry { while url.starts_with('/') { url.remove(0); } ItemEntry { url, name } } } impl ItemEntry { crate fn print(&self) -> impl fmt::Display + '_ { crate::html::format::display_fn(move |f| { write!(f, "<a href=\"{}\">{}</a>", self.url, Escape(&self.name)) }) } } impl PartialOrd for ItemEntry { fn partial_cmp(&self, other: &ItemEntry) -> Option<::std::cmp::Ordering> { Some(self.cmp(other)) } } impl Ord for ItemEntry { fn cmp(&self, other: &ItemEntry) -> ::std::cmp::Ordering { self.name.cmp(&other.name) } } #[derive(Debug)] struct AllTypes { structs: FxHashSet<ItemEntry>, enums: FxHashSet<ItemEntry>, unions: FxHashSet<ItemEntry>, primitives: FxHashSet<ItemEntry>, traits: FxHashSet<ItemEntry>, macros: FxHashSet<ItemEntry>, functions: FxHashSet<ItemEntry>, typedefs: FxHashSet<ItemEntry>, opaque_tys: FxHashSet<ItemEntry>, statics: FxHashSet<ItemEntry>, constants: FxHashSet<ItemEntry>, keywords: FxHashSet<ItemEntry>, attributes: FxHashSet<ItemEntry>, derives: FxHashSet<ItemEntry>, trait_aliases: FxHashSet<ItemEntry>, } impl AllTypes { fn new() -> AllTypes { let new_set = |cap| FxHashSet::with_capacity_and_hasher(cap, Default::default()); AllTypes { structs: new_set(100), enums: new_set(100), unions: new_set(100), primitives: new_set(26), traits: new_set(100), macros: new_set(100), functions: new_set(100), typedefs: new_set(100), opaque_tys: new_set(100), statics: new_set(100), constants: new_set(100), keywords: new_set(100), attributes: new_set(100), derives: new_set(100), trait_aliases: new_set(100), } } fn append(&mut self, item_name: String, item_type: &ItemType) { let mut url: Vec<_> = item_name.split("::").skip(1).collect(); if let Some(name) = url.pop() { let new_url = format!("{}/{}.{}.html", url.join("/"), item_type, name); url.push(name); let name = url.join("::"); match *item_type { ItemType::Struct => self.structs.insert(ItemEntry::new(new_url, name)), ItemType::Enum => self.enums.insert(ItemEntry::new(new_url, name)), ItemType::Union => self.unions.insert(ItemEntry::new(new_url, name)), ItemType::Primitive => self.primitives.insert(ItemEntry::new(new_url, name)), ItemType::Trait => self.traits.insert(ItemEntry::new(new_url, name)), ItemType::Macro => self.macros.insert(ItemEntry::new(new_url, name)), ItemType::Function => self.functions.insert(ItemEntry::new(new_url, name)), ItemType::Typedef => 
self.typedefs.insert(ItemEntry::new(new_url, name)), ItemType::OpaqueTy => self.opaque_tys.insert(ItemEntry::new(new_url, name)), ItemType::Static => self.statics.insert(ItemEntry::new(new_url, name)), ItemType::Constant => self.constants.insert(ItemEntry::new(new_url, name)), ItemType::ProcAttribute => self.attributes.insert(ItemEntry::new(new_url, name)), ItemType::ProcDerive => self.derives.insert(ItemEntry::new(new_url, name)), ItemType::TraitAlias => self.trait_aliases.insert(ItemEntry::new(new_url, name)), _ => true, }; } } } fn print_entries(f: &mut Buffer, e: &FxHashSet<ItemEntry>, title: &str, class: &str) { if !e.is_empty() { let mut e: Vec<&ItemEntry> = e.iter().collect(); e.sort(); write!( f, "<h3 id=\"{}\">{}</h3><ul class=\"{} docblock\">{}</ul>", title, Escape(title), class, e.iter().map(|s| format!("<li>{}</li>", s.print())).collect::<String>() ); } } impl AllTypes { fn print(self, f: &mut Buffer) { write!( f, "<h1 class=\"fqn\">\ <span class=\"out-of-band\">\ <span id=\"render-detail\">\ <a id=\"toggle-all-docs\" href=\"javascript:void(0)\" \ title=\"collapse all docs\">\ [<span class=\"inner\">&#x2212;</span>]\ </a>\ </span> </span> <span class=\"in-band\">List of all items</span>\ </h1>" ); print_entries(f, &self.structs, "Structs", "structs"); print_entries(f, &self.enums, "Enums", "enums"); print_entries(f, &self.unions, "Unions", "unions"); print_entries(f, &self.primitives, "Primitives", "primitives"); print_entries(f, &self.traits, "Traits", "traits"); print_entries(f, &self.macros, "Macros", "macros"); print_entries(f, &self.attributes, "Attribute Macros", "attributes"); print_entries(f, &self.derives, "Derive Macros", "derives"); print_entries(f, &self.functions, "Functions", "functions"); print_entries(f, &self.typedefs, "Typedefs", "typedefs"); print_entries(f, &self.trait_aliases, "Trait Aliases", "trait-aliases"); print_entries(f, &self.opaque_tys, "Opaque Types", "opaque-types"); print_entries(f, &self.statics, "Statics", "statics"); print_entries(f, &self.constants, "Constants", "constants") } } #[derive(Debug)] enum Setting { Section { description: &'static str, sub_settings: Vec<Setting>, }, Toggle { js_data_name: &'static str, description: &'static str, default_value: bool, }, Select { js_data_name: &'static str, description: &'static str, default_value: &'static str, options: Vec<(String, String)>, }, } impl Setting { fn display(&self, root_path: &str, suffix: &str) -> String { match *self { Setting::Section { description, ref sub_settings } => format!( "<div class=\"setting-line\">\ <div class=\"title\">{}</div>\ <div class=\"sub-settings\">{}</div> </div>", description, sub_settings.iter().map(|s| s.display(root_path, suffix)).collect::<String>() ), Setting::Toggle { js_data_name, description, default_value } => format!( "<div class=\"setting-line\">\ <label class=\"toggle\">\ <input type=\"checkbox\" id=\"{}\" {}>\ <span class=\"slider\"></span>\ </label>\ <div>{}</div>\ </div>", js_data_name, if default_value { " checked" } else { "" }, description, ), Setting::Select { js_data_name, description, default_value, ref options } => format!( "<div class=\"setting-line\">\ <div>{}</div>\ <label class=\"select-wrapper\">\ <select id=\"{}\" autocomplete=\"off\">{}</select>\ <img src=\"{}down-arrow{}.svg\" alt=\"Select item\">\ </label>\ </div>", description, js_data_name, options .iter() .map(|opt| format!( "<option value=\"{}\" {}>{}</option>", opt.0, if opt.0 == default_value { "selected" } else { "" }, opt.1, )) .collect::<String>(), root_path, suffix, ), 
} } } impl From<(&'static str, &'static str, bool)> for Setting { fn from(values: (&'static str, &'static str, bool)) -> Setting { Setting::Toggle { js_data_name: values.0, description: values.1, default_value: values.2 } } } impl<T: Into<Setting>> From<(&'static str, Vec<T>)> for Setting { fn from(values: (&'static str, Vec<T>)) -> Setting { Setting::Section { description: values.0, sub_settings: values.1.into_iter().map(|v| v.into()).collect::<Vec<_>>(), } } } fn settings(root_path: &str, suffix: &str, themes: &[StylePath]) -> Result<String, Error> { let theme_names: Vec<(String, String)> = themes .iter() .map(|entry| { let theme = try_none!(try_none!(entry.path.file_stem(), &entry.path).to_str(), &entry.path) .to_string(); Ok((theme.clone(), theme)) }) .collect::<Result<_, Error>>()?; // (id, explanation, default value) let settings: &[Setting] = &[ ( "Theme preferences", vec![ Setting::from(("use-system-theme", "Use system theme", true)), Setting::Select { js_data_name: "preferred-dark-theme", description: "Preferred dark theme", default_value: "dark", options: theme_names.clone(), }, Setting::Select { js_data_name: "preferred-light-theme", description: "Preferred light theme", default_value: "light", options: theme_names, }, ], ) .into(), ( "Auto-hide item declarations", vec![ ("auto-hide-struct", "Auto-hide structs declaration", true), ("auto-hide-enum", "Auto-hide enums declaration", false), ("auto-hide-union", "Auto-hide unions declaration", true), ("auto-hide-trait", "Auto-hide traits declaration", true), ("auto-hide-macro", "Auto-hide macros declaration", false), ], ) .into(), ("auto-hide-attributes", "Auto-hide item attributes.", true).into(), ("auto-hide-method-docs", "Auto-hide item methods' documentation", false).into(), ("auto-hide-trait-implementations", "Auto-hide trait implementation documentation", true) .into(), ("auto-collapse-implementors", "Auto-hide implementors of a trait", true).into(), ("go-to-only-result", "Directly go to item in search if there is only one result", false) .into(), ("line-numbers", "Show line numbers on code examples", false).into(), ("disable-shortcuts", "Disable keyboard shortcuts", false).into(), ]; Ok(format!( "<h1 class=\"fqn\">\ <span class=\"in-band\">Rustdoc settings</span>\ </h1>\ <div class=\"settings\">{}</div>\ <script src=\"{}settings{}.js\"></script>", settings.iter().map(|s| s.display(root_path, suffix)).collect::<String>(), root_path, suffix )) } impl Context<'_> { fn derive_id(&self, id: String) -> String { let mut map = self.id_map.borrow_mut(); map.derive(id) } /// String representation of how to get back to the root path of the 'doc/' /// folder in terms of a relative URL. fn root_path(&self) -> String { "../".repeat(self.current.len()) } fn render_item(&self, it: &clean::Item, pushname: bool, cache: &Cache) -> String { // A little unfortunate that this is done like this, but it sure // does make formatting *a lot* nicer. 
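// For a hypothetical `struct Foo` in module `bar` of crate `mycrate`, this
// computes the page title `mycrate::bar::Foo - Rust` and the description
// "API documentation for the Rust `Foo` struct in crate `mycrate`.".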
CURRENT_DEPTH.with(|slot| { slot.set(self.current.len()); }); let mut title = if it.is_primitive() || it.is_keyword() { // No need to include the namespace for primitive types and keywords String::new() } else { self.current.join("::") }; if pushname { if !title.is_empty() { title.push_str("::"); } title.push_str(&it.name.unwrap().as_str()); } title.push_str(" - Rust"); let tyname = it.type_(); let desc = if it.is_crate() { format!("API documentation for the Rust `{}` crate.", self.shared.layout.krate) } else { format!( "API documentation for the Rust `{}` {} in crate `{}`.", it.name.as_ref().unwrap(), tyname, self.shared.layout.krate ) }; let keywords = make_item_keywords(it); let page = layout::Page { css_class: tyname.as_str(), root_path: &self.root_path(), static_root_path: self.shared.static_root_path.as_deref(), title: &title, description: &desc, keywords: &keywords, resource_suffix: &self.shared.resource_suffix, extra_scripts: &[], static_extra_scripts: &[], }; { self.id_map.borrow_mut().reset(); self.id_map.borrow_mut().populate(initial_ids()); } if !self.render_redirect_pages { layout::render( &self.shared.layout, &page, |buf: &mut _| print_sidebar(self, it, buf, cache), |buf: &mut _| print_item(self, it, buf, cache), &self.shared.style_files, ) } else { let mut url = self.root_path(); if let Some(&(ref names, ty)) = cache.paths.get(&it.def_id) { for name in &names[..names.len() - 1] { url.push_str(name); url.push('/'); } url.push_str(&item_path(ty, names.last().unwrap())); layout::redirect(&url) } else { String::new() } } } /// Construct a map of items shown in the sidebar to a plain-text summary of their docs. fn build_sidebar_items(&self, m: &clean::Module) -> BTreeMap<String, Vec<NameDoc>> { // BTreeMap instead of HashMap to get a sorted output let mut map: BTreeMap<_, Vec<_>> = BTreeMap::new(); for item in &m.items { if item.is_stripped() { continue; } let short = item.type_(); let myname = match item.name { None => continue, Some(ref s) => s.to_string(), }; let short = short.to_string(); map.entry(short).or_default().push(( myname, Some(item.doc_value().map_or_else(String::new, |s| plain_text_summary(&s))), )); } if self.shared.sort_modules_alphabetically { for items in map.values_mut() { items.sort(); } } map } /// Generates a url appropriate for an `href` attribute back to the source of /// this item. /// /// The url generated, when clicked, will redirect the browser back to the /// original source code. /// /// If `None` is returned, then a source link couldn't be generated. This /// may happen, for example, with externally inlined items where the source /// of their crate documentation isn't known. fn src_href(&self, item: &clean::Item, cache: &Cache) -> Option<String> { let mut root = self.root_path(); let mut path = String::new(); let cnum = item.source.cnum(self.sess()); // We can safely ignore synthetic `SourceFile`s. let file = match item.source.filename(self.sess()) { FileName::Real(ref path) => path.local_path().to_path_buf(), _ => return None, }; let file = &file; let symbol; let (krate, path) = if cnum == LOCAL_CRATE { if let Some(path) = self.shared.local_sources.get(file) { (self.shared.layout.krate.as_str(), path) } else { return None; } } else { let (krate, src_root) = match *cache.extern_locations.get(&cnum)? 
{ (name, ref src, ExternalLocation::Local) => (name, src), (name, ref src, ExternalLocation::Remote(ref s)) => { root = s.to_string(); (name, src) } (_, _, ExternalLocation::Unknown) => return None, }; sources::clean_path(&src_root, file, false, |component| { path.push_str(&component.to_string_lossy()); path.push('/'); }); let mut fname = file.file_name().expect("source has no filename").to_os_string(); fname.push(".html"); path.push_str(&fname.to_string_lossy()); symbol = krate.as_str(); (&*symbol, &path) }; let loline = item.source.lo(self.sess()).line; let hiline = item.source.hi(self.sess()).line; let lines = if loline == hiline { loline.to_string() } else { format!("{}-{}", loline, hiline) }; Some(format!( "{root}src/{krate}/{path}#{lines}", root = Escape(&root), krate = krate, path = path, lines = lines )) } } fn wrap_into_docblock<F>(w: &mut Buffer, f: F) where F: FnOnce(&mut Buffer), { write!(w, "<div class=\"docblock type-decl hidden-by-usual-hider\">"); f(w); write!(w, "</div>") } fn print_item(cx: &Context<'_>, item: &clean::Item, buf: &mut Buffer, cache: &Cache) { debug_assert!(!item.is_stripped()); // Write the breadcrumb trail header for the top write!(buf, "<h1 class=\"fqn\"><span class=\"out-of-band\">"); render_stability_since_raw( buf, item.stable_since(cx.tcx()).as_deref(), item.const_stable_since(cx.tcx()).as_deref(), None, None, ); write!( buf, "<span id=\"render-detail\">\ <a id=\"toggle-all-docs\" href=\"javascript:void(0)\" \ title=\"collapse all docs\">\ [<span class=\"inner\">&#x2212;</span>]\ </a>\ </span>" ); // Write `src` tag // // When this item is part of a `crate use` in a downstream crate, the // [src] link in the downstream documentation will actually come back to // this page, and this link will be auto-clicked. The `id` attribute is // used to find the link to auto-click. if cx.shared.include_sources && !item.is_primitive() { write_srclink(cx, item, buf, cache); } write!(buf, "</span>"); // out-of-band write!(buf, "<span class=\"in-band\">"); let name = match *item.kind { clean::ModuleItem(ref m) => { if m.is_crate { "Crate " } else { "Module " } } clean::FunctionItem(..) | clean::ForeignFunctionItem(..) => "Function ", clean::TraitItem(..) => "Trait ", clean::StructItem(..) => "Struct ", clean::UnionItem(..) => "Union ", clean::EnumItem(..) => "Enum ", clean::TypedefItem(..) => "Type Definition ", clean::MacroItem(..) => "Macro ", clean::ProcMacroItem(ref mac) => match mac.kind { MacroKind::Bang => "Macro ", MacroKind::Attr => "Attribute Macro ", MacroKind::Derive => "Derive Macro ", }, clean::PrimitiveItem(..) => "Primitive Type ", clean::StaticItem(..) | clean::ForeignStaticItem(..) => "Static ", clean::ConstantItem(..) => "Constant ", clean::ForeignTypeItem => "Foreign Type ", clean::KeywordItem(..) => "Keyword ", clean::OpaqueTyItem(..) => "Opaque Type ", clean::TraitAliasItem(..) => "Trait Alias ", _ => { // We don't generate pages for any other type. 
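// (Illustrative: kinds like `impl` blocks, methods, and struct fields are
// rendered inline on their parent's page, so they never reach this match.)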
unreachable!(); } }; buf.write_str(name); if !item.is_primitive() && !item.is_keyword() { let cur = &cx.current; let amt = if item.is_mod() { cur.len() - 1 } else { cur.len() }; for (i, component) in cur.iter().enumerate().take(amt) { write!( buf, "<a href=\"{}index.html\">{}</a>::<wbr>", "../".repeat(cur.len() - i - 1), component ); } } write!(buf, "<a class=\"{}\" href=\"\">{}</a>", item.type_(), item.name.as_ref().unwrap()); write!(buf, "</span></h1>"); // in-band match *item.kind { clean::ModuleItem(ref m) => item_module(buf, cx, item, &m.items), clean::FunctionItem(ref f) | clean::ForeignFunctionItem(ref f) => { item_function(buf, cx, item, f) } clean::TraitItem(ref t) => item_trait(buf, cx, item, t, cache), clean::StructItem(ref s) => item_struct(buf, cx, item, s, cache), clean::UnionItem(ref s) => item_union(buf, cx, item, s, cache), clean::EnumItem(ref e) => item_enum(buf, cx, item, e, cache), clean::TypedefItem(ref t, _) => item_typedef(buf, cx, item, t, cache), clean::MacroItem(ref m) => item_macro(buf, cx, item, m), clean::ProcMacroItem(ref m) => item_proc_macro(buf, cx, item, m), clean::PrimitiveItem(_) => item_primitive(buf, cx, item, cache), clean::StaticItem(ref i) | clean::ForeignStaticItem(ref i) => item_static(buf, cx, item, i), clean::ConstantItem(ref c) => item_constant(buf, cx, item, c), clean::ForeignTypeItem => item_foreign_type(buf, cx, item, cache), clean::KeywordItem(_) => item_keyword(buf, cx, item), clean::OpaqueTyItem(ref e) => item_opaque_ty(buf, cx, item, e, cache), clean::TraitAliasItem(ref ta) => item_trait_alias(buf, cx, item, ta, cache), _ => { // We don't generate pages for any other type. unreachable!(); } } } fn item_path(ty: ItemType, name: &str) -> String { match ty { ItemType::Module => format!("{}index.html", ensure_trailing_slash(name)), _ => format!("{}.{}.html", ty, name), } } fn full_path(cx: &Context<'_>, item: &clean::Item) -> String { let mut s = cx.current.join("::"); s.push_str("::"); s.push_str(&item.name.unwrap().as_str()); s } fn document(w: &mut Buffer, cx: &Context<'_>, item: &clean::Item, parent: Option<&clean::Item>) { if let Some(ref name) = item.name { info!("Documenting {}", name); } document_item_info(w, cx, item, false, parent); document_full(w, item, cx, "", false); } /// Render md_text as markdown. fn render_markdown( w: &mut Buffer, cx: &Context<'_>, md_text: &str, links: Vec<RenderedLink>, prefix: &str, is_hidden: bool, ) { let mut ids = cx.id_map.borrow_mut(); write!( w, "<div class=\"docblock{}\">{}{}</div>", if is_hidden { " hidden" } else { "" }, prefix, Markdown( md_text, &links, &mut ids, cx.shared.codes, cx.shared.edition, &cx.shared.playground ) .into_string() ) } /// Writes a documentation block containing only the first paragraph of the documentation. If the /// docs are longer, a "Read more" link is appended to the end. 
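/// For example (illustrative), a two-paragraph doc string is reduced to its first
/// paragraph, with ` <a href="...">Read more</a>` spliced in before the closing
/// `</p>` when one is present and appended at the end otherwise.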
fn document_short( w: &mut Buffer, item: &clean::Item, cx: &Context<'_>, link: AssocItemLink<'_>, prefix: &str, is_hidden: bool, parent: Option<&clean::Item>, show_def_docs: bool, ) { document_item_info(w, cx, item, is_hidden, parent); if !show_def_docs { return; } if let Some(s) = item.doc_value() { let mut summary_html = MarkdownSummaryLine(&s, &item.links()).into_string(); if s.contains('\n') { let link = format!(r#" <a href="{}">Read more</a>"#, naive_assoc_href(item, link)); if let Some(idx) = summary_html.rfind("</p>") { summary_html.insert_str(idx, &link); } else { summary_html.push_str(&link); } } write!( w, "<div class='docblock{}'>{}{}</div>", if is_hidden { " hidden" } else { "" }, prefix, summary_html, ); } else if !prefix.is_empty() { write!( w, "<div class=\"docblock{}\">{}</div>", if is_hidden { " hidden" } else { "" }, prefix ); } } fn document_full( w: &mut Buffer, item: &clean::Item, cx: &Context<'_>, prefix: &str, is_hidden: bool, ) { if let Some(s) = cx.shared.maybe_collapsed_doc_value(item) { debug!("Doc block: =====\n{}\n=====", s); render_markdown(w, cx, &*s, item.links(), prefix, is_hidden); } else if !prefix.is_empty() { write!( w, "<div class=\"docblock{}\">{}</div>", if is_hidden { " hidden" } else { "" }, prefix ); } } /// Add extra information about an item such as: /// /// * Stability /// * Deprecated /// * Required features (through the `doc_cfg` feature) fn document_item_info( w: &mut Buffer, cx: &Context<'_>, item: &clean::Item, is_hidden: bool, parent: Option<&clean::Item>, ) { let item_infos = short_item_info(item, cx, parent); if !item_infos.is_empty() { write!(w, "<div class=\"item-info{}\">", if is_hidden { " hidden" } else { "" }); for info in item_infos { write!(w, "{}", info); } write!(w, "</div>"); } } fn document_non_exhaustive_header(item: &clean::Item) -> &str { if item.is_non_exhaustive() { " (Non-exhaustive)" } else { "" } } fn document_non_exhaustive(w: &mut Buffer, item: &clean::Item) { if item.is_non_exhaustive() { write!(w, "<div class=\"docblock non-exhaustive non-exhaustive-{}\">", { if item.is_struct() { "struct" } else if item.is_enum() { "enum" } else if item.is_variant() { "variant" } else { "type" } }); if item.is_struct() { write!( w, "Non-exhaustive structs could have additional fields added in future. \ Therefore, non-exhaustive structs cannot be constructed in external crates \ using the traditional <code>Struct {{ .. }}</code> syntax; cannot be \ matched against without a wildcard <code>..</code>; and \ struct update syntax will not work." ); } else if item.is_enum() { write!( w, "Non-exhaustive enums could have additional variants added in future. \ Therefore, when matching against variants of non-exhaustive enums, an \ extra wildcard arm must be added to account for any future variants." ); } else if item.is_variant() { write!( w, "Non-exhaustive enum variants could have additional fields added in future. \ Therefore, non-exhaustive enum variants cannot be constructed in external \ crates and cannot be matched against." ); } else { write!( w, "This type will require a wildcard arm in any match statements or constructors." ); } write!(w, "</div>"); } } /// Compare two strings treating multi-digit numbers as single units (i.e. natural sort order). crate fn compare_names(mut lhs: &str, mut rhs: &str) -> Ordering { /// Takes a non-numeric and a numeric part from the given &str. 
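/// A worked example (illustrative): with `s = "abc12def"`, the first call returns
/// `("abc", "12")` and leaves `s == "def"`; the next call returns `("def", "")`.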
fn take_parts<'a>(s: &mut &'a str) -> (&'a str, &'a str) { let i = s.find(|c: char| c.is_ascii_digit()); let (a, b) = s.split_at(i.unwrap_or(s.len())); let i = b.find(|c: char| !c.is_ascii_digit()); let (b, c) = b.split_at(i.unwrap_or(b.len())); *s = c; (a, b) } while !lhs.is_empty() || !rhs.is_empty() { let (la, lb) = take_parts(&mut lhs); let (ra, rb) = take_parts(&mut rhs); // First process the non-numeric part. match la.cmp(ra) { Ordering::Equal => (), x => return x, } // Then process the numeric part, if both sides have one (and they fit in a u64). if let (Ok(ln), Ok(rn)) = (lb.parse::<u64>(), rb.parse::<u64>()) { match ln.cmp(&rn) { Ordering::Equal => (), x => return x, } } // Then process the numeric part again, but this time as strings. match lb.cmp(rb) { Ordering::Equal => (), x => return x, } } Ordering::Equal } fn item_module(w: &mut Buffer, cx: &Context<'_>, item: &clean::Item, items: &[clean::Item]) { document(w, cx, item, None); let mut indices = (0..items.len()).filter(|i| !items[*i].is_stripped()).collect::<Vec<usize>>(); // the order of item types in the listing fn reorder(ty: ItemType) -> u8 { match ty { ItemType::ExternCrate => 0, ItemType::Import => 1, ItemType::Primitive => 2, ItemType::Module => 3, ItemType::Macro => 4, ItemType::Struct => 5, ItemType::Enum => 6, ItemType::Constant => 7, ItemType::Static => 8, ItemType::Trait => 9, ItemType::Function => 10, ItemType::Typedef => 12, ItemType::Union => 13, _ => 14 + ty as u8, } } fn cmp( i1: &clean::Item, i2: &clean::Item, idx1: usize, idx2: usize, tcx: TyCtxt<'_>, ) -> Ordering { let ty1 = i1.type_(); let ty2 = i2.type_(); if ty1 != ty2 { return (reorder(ty1), idx1).cmp(&(reorder(ty2), idx2)); } let s1 = i1.stability(tcx).as_ref().map(|s| s.level); let s2 = i2.stability(tcx).as_ref().map(|s| s.level); if let (Some(a), Some(b)) = (s1, s2) { match (a.is_stable(), b.is_stable()) { (true, true) | (false, false) => {} (false, true) => return Ordering::Less, (true, false) => return Ordering::Greater, } } let lhs = i1.name.unwrap_or(kw::Empty).as_str(); let rhs = i2.name.unwrap_or(kw::Empty).as_str(); compare_names(&lhs, &rhs) } if cx.shared.sort_modules_alphabetically { indices.sort_by(|&i1, &i2| cmp(&items[i1], &items[i2], i1, i2, cx.tcx())); } // This call is to remove re-export duplicates in cases such as: // // ``` // crate mod foo { // crate mod bar { // crate trait Double { fn foo(); } // } // } // // crate use foo::bar::*; // crate use foo::*; // ``` // // `Double` will appear twice in the generated docs. // // FIXME: This code is quite ugly and could be improved. Small issue: DefId // can be identical even if the elements are different (mostly in imports). // So in case this is an import, we keep everything by adding a "unique id" // (which is the position in the vector). indices.dedup_by_key(|i| { ( items[*i].def_id, if items[*i].name.as_ref().is_some() { Some(full_path(cx, &items[*i])) } else { None }, items[*i].type_(), if items[*i].is_import() { *i } else { 0 }, ) }); debug!("{:?}", indices); let mut curty = None; for &idx in &indices { let myitem = &items[idx]; if myitem.is_stripped() { continue; } let myty = Some(myitem.type_()); if curty == Some(ItemType::ExternCrate) && myty == Some(ItemType::Import) { // Put `extern crate` and `use` re-exports in the same section. 
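// Concretely (illustrative): an `extern crate log;` followed by `pub use`
// re-exports stays in the table opened for the `extern crate` entries rather
// than closing it and starting a new section for the `use` items.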
curty = myty; } else if myty != curty { if curty.is_some() { write!(w, "</table>"); } curty = myty; let (short, name) = item_ty_to_strs(&myty.unwrap()); write!( w, "<h2 id=\"{id}\" class=\"section-header\">\ <a href=\"#{id}\">{name}</a></h2>\n<table>", id = cx.derive_id(short.to_owned()), name = name ); } match *myitem.kind { clean::ExternCrateItem(ref name, ref src) => { use crate::html::format::anchor; match *src { Some(ref src) => write!( w, "<tr><td><code>{}extern crate {} as {};", myitem.visibility.print_with_space(cx.tcx(), myitem.def_id), anchor(myitem.def_id, &*src.as_str()), name ), None => write!( w, "<tr><td><code>{}extern crate {};", myitem.visibility.print_with_space(cx.tcx(), myitem.def_id), anchor(myitem.def_id, &*name.as_str()) ), } write!(w, "</code></td></tr>"); } clean::ImportItem(ref import) => { write!( w, "<tr><td><code>{}{}</code></td></tr>", myitem.visibility.print_with_space(cx.tcx(), myitem.def_id), import.print() ); } _ => { if myitem.name.is_none() { continue; } let unsafety_flag = match *myitem.kind { clean::FunctionItem(ref func) | clean::ForeignFunctionItem(ref func) if func.header.unsafety == hir::Unsafety::Unsafe => { "<a title=\"unsafe function\" href=\"#\"><sup>⚠</sup></a>" } _ => "", }; let stab = myitem.stability_class(cx.tcx()); let add = if stab.is_some() { " " } else { "" }; let doc_value = myitem.doc_value().unwrap_or_default(); write!( w, "<tr class=\"{stab}{add}module-item\">\ <td><a class=\"{class}\" href=\"{href}\" \ title=\"{title}\">{name}</a>{unsafety_flag}</td>\ <td class=\"docblock-short\">{stab_tags}{docs}</td>\ </tr>", name = *myitem.name.as_ref().unwrap(), stab_tags = extra_info_tags(myitem, item, cx.tcx()), docs = MarkdownSummaryLine(&doc_value, &myitem.links()).into_string(), class = myitem.type_(), add = add, stab = stab.unwrap_or_else(String::new), unsafety_flag = unsafety_flag, href = item_path(myitem.type_(), &myitem.name.unwrap().as_str()), title = [full_path(cx, myitem), myitem.type_().to_string()] .iter() .filter_map(|s| if !s.is_empty() { Some(s.as_str()) } else { None }) .collect::<Vec<_>>() .join(" "), ); } } } if curty.is_some() { write!(w, "</table>"); } } /// Render the stability, deprecation and portability tags that are displayed in the item's summary /// at the module level. fn extra_info_tags(item: &clean::Item, parent: &clean::Item, tcx: TyCtxt<'_>) -> String { let mut tags = String::new(); fn tag_html(class: &str, title: &str, contents: &str) -> String { format!(r#"<span class="stab {}" title="{}">{}</span>"#, class, Escape(title), contents) } // The trailing space after each tag is to space it properly against the rest of the docs. if let Some(depr) = &item.deprecation(tcx) { let mut message = "Deprecated"; if !stability::deprecation_in_effect( depr.is_since_rustc_version, depr.since.map(|s| s.as_str()).as_deref(), ) { message = "Deprecation planned"; } tags += &tag_html("deprecated", "", message); } // The "rustc_private" crates are permanently unstable so it makes no sense // to render "unstable" everywhere. 
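// A hypothetical unstable (non-`rustc_private`) item therefore gets exactly:
//
//     <span class="stab unstable" title="">Experimental</span>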
if item .stability(tcx) .as_ref() .map(|s| s.level.is_unstable() && s.feature != sym::rustc_private) == Some(true) { tags += &tag_html("unstable", "", "Experimental"); } let cfg = match (&item.attrs.cfg, parent.attrs.cfg.as_ref()) { (Some(cfg), Some(parent_cfg)) => cfg.simplify_with(parent_cfg), (cfg, _) => cfg.as_deref().cloned(), }; debug!("Portability {:?} - {:?} = {:?}", item.attrs.cfg, parent.attrs.cfg, cfg); if let Some(ref cfg) = cfg { tags += &tag_html("portability", &cfg.render_long_plain(), &cfg.render_short_html()); } tags } fn portability(item: &clean::Item, parent: Option<&clean::Item>) -> Option<String> { let cfg = match (&item.attrs.cfg, parent.and_then(|p| p.attrs.cfg.as_ref())) { (Some(cfg), Some(parent_cfg)) => cfg.simplify_with(parent_cfg), (cfg, _) => cfg.as_deref().cloned(), }; debug!( "Portability {:?} - {:?} = {:?}", item.attrs.cfg, parent.and_then(|p| p.attrs.cfg.as_ref()), cfg ); Some(format!("<div class=\"stab portability\">{}</div>", cfg?.render_long_html())) } /// Render the stability, deprecation and portability information that is displayed at the top of /// the item's documentation. fn short_item_info( item: &clean::Item, cx: &Context<'_>, parent: Option<&clean::Item>, ) -> Vec<String> { let mut extra_info = vec![]; let error_codes = cx.shared.codes; if let Some(Deprecation { note, since, is_since_rustc_version, suggestion: _ }) = item.deprecation(cx.tcx()) { // We display deprecation messages for #[deprecated] and #[rustc_deprecated] // but only display the future-deprecation messages for #[rustc_deprecated]. let mut message = if let Some(since) = since { let since = &since.as_str(); if !stability::deprecation_in_effect(is_since_rustc_version, Some(since)) { if *since == "TBD" { String::from("Deprecating in a future Rust version") } else { format!("Deprecating in {}", Escape(since)) } } else { format!("Deprecated since {}", Escape(since)) } } else { String::from("Deprecated") }; if let Some(note) = note { let note = note.as_str(); let mut ids = cx.id_map.borrow_mut(); let html = MarkdownHtml( &note, &mut ids, error_codes, cx.shared.edition, &cx.shared.playground, ); message.push_str(&format!(": {}", html.into_string())); } extra_info.push(format!( "<div class=\"stab deprecated\"><span class=\"emoji\">👎</span> {}</div>", message, )); } // Render unstable items. But don't render "rustc_private" crates (internal compiler crates). // Those crates are permanently unstable so it makes no sense to render "unstable" everywhere. if let Some((StabilityLevel::Unstable { reason, issue, .. 
}, feature)) = item .stability(cx.tcx()) .as_ref() .filter(|stab| stab.feature != sym::rustc_private) .map(|stab| (stab.level, stab.feature)) { let mut message = "<span class=\"emoji\">🔬</span> This is a nightly-only experimental API.".to_owned(); let mut feature = format!("<code>{}</code>", Escape(&feature.as_str())); if let (Some(url), Some(issue)) = (&cx.shared.issue_tracker_base_url, issue) { feature.push_str(&format!( "&nbsp;<a href=\"{url}{issue}\">#{issue}</a>", url = url, issue = issue )); } message.push_str(&format!(" ({})", feature)); if let Some(unstable_reason) = reason { let mut ids = cx.id_map.borrow_mut(); message = format!( "<details><summary>{}</summary>{}</details>", message, MarkdownHtml( &unstable_reason.as_str(), &mut ids, error_codes, cx.shared.edition, &cx.shared.playground, ) .into_string() ); } extra_info.push(format!("<div class=\"stab unstable\">{}</div>", message)); } if let Some(portability) = portability(item, parent) { extra_info.push(portability); } extra_info } fn item_constant(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, c: &clean::Constant) { write!(w, "<pre class=\"rust const\">"); render_attributes(w, it, false); write!( w, "{vis}const {name}: {typ}", vis = it.visibility.print_with_space(cx.tcx(), it.def_id), name = it.name.as_ref().unwrap(), typ = c.type_.print(), ); if c.value.is_some() || c.is_literal { write!(w, " = {expr};", expr = Escape(&c.expr)); } else { write!(w, ";"); } if let Some(value) = &c.value { if !c.is_literal { let value_lowercase = value.to_lowercase(); let expr_lowercase = c.expr.to_lowercase(); if value_lowercase != expr_lowercase && value_lowercase.trim_end_matches("i32") != expr_lowercase { write!(w, " // {value}", value = Escape(value)); } } } write!(w, "</pre>"); document(w, cx, it, None) } fn item_static(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, s: &clean::Static) { write!(w, "<pre class=\"rust static\">"); render_attributes(w, it, false); write!( w, "{vis}static {mutability}{name}: {typ}</pre>", vis = it.visibility.print_with_space(cx.tcx(), it.def_id), mutability = s.mutability.print_with_space(), name = it.name.as_ref().unwrap(), typ = s.type_.print() ); document(w, cx, it, None) } fn item_function(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, f: &clean::Function) { let header_len = format!( "{}{}{}{}{:#}fn {}{:#}", it.visibility.print_with_space(cx.tcx(), it.def_id), f.header.constness.print_with_space(), f.header.asyncness.print_with_space(), f.header.unsafety.print_with_space(), print_abi_with_space(f.header.abi), it.name.as_ref().unwrap(), f.generics.print() ) .len(); write!(w, "<pre class=\"rust fn\">"); render_attributes(w, it, false); write!( w, "{vis}{constness}{asyncness}{unsafety}{abi}fn \ {name}{generics}{decl}{spotlight}{where_clause}</pre>", vis = it.visibility.print_with_space(cx.tcx(), it.def_id), constness = f.header.constness.print_with_space(), asyncness = f.header.asyncness.print_with_space(), unsafety = f.header.unsafety.print_with_space(), abi = print_abi_with_space(f.header.abi), name = it.name.as_ref().unwrap(), generics = f.generics.print(), where_clause = WhereClause { gens: &f.generics, indent: 0, end_newline: true }, decl = Function { decl: &f.decl, header_len, indent: 0, asyncness: f.header.asyncness } .print(), spotlight = spotlight_decl(&f.decl), ); document(w, cx, it, None) } fn render_implementor( cx: &Context<'_>, implementor: &Impl, parent: &clean::Item, w: &mut Buffer, implementor_dups: &FxHashMap<Symbol, (DefId, bool)>, aliases: &[String], cache: &Cache, ) { // If 
there's already another implementor that has the same abbreviated name, use the // full path, for example in `std::iter::ExactSizeIterator` let use_absolute = match implementor.inner_impl().for_ { clean::ResolvedPath { ref path, is_generic: false, .. } | clean::BorrowedRef { type_: box clean::ResolvedPath { ref path, is_generic: false, .. }, .. } => implementor_dups[&path.last()].1, _ => false, }; render_impl( w, cx, implementor, parent, AssocItemLink::Anchor(None), RenderMode::Normal, implementor.impl_item.stable_since(cx.tcx()).as_deref(), implementor.impl_item.const_stable_since(cx.tcx()).as_deref(), false, Some(use_absolute), false, false, aliases, cache, ); } fn render_impls( cx: &Context<'_>, w: &mut Buffer, traits: &[&&Impl], containing_item: &clean::Item, cache: &Cache, ) { let mut impls = traits .iter() .map(|i| { let did = i.trait_did().unwrap(); let assoc_link = AssocItemLink::GotoSource(did, &i.inner_impl().provided_trait_methods); let mut buffer = if w.is_for_html() { Buffer::html() } else { Buffer::new() }; render_impl( &mut buffer, cx, i, containing_item, assoc_link, RenderMode::Normal, containing_item.stable_since(cx.tcx()).as_deref(), containing_item.const_stable_since(cx.tcx()).as_deref(), true, None, false, true, &[], cache, ); buffer.into_inner() }) .collect::<Vec<_>>(); impls.sort(); w.write_str(&impls.join("")); } fn bounds(t_bounds: &[clean::GenericBound], trait_alias: bool) -> String { let mut bounds = String::new(); if !t_bounds.is_empty() { if !trait_alias { bounds.push_str(": "); } for (i, p) in t_bounds.iter().enumerate() { if i > 0 { bounds.push_str(" + "); } bounds.push_str(&p.print().to_string()); } } bounds } fn compare_impl<'a, 'b>(lhs: &'a &&Impl, rhs: &'b &&Impl) -> Ordering { let lhs = format!("{}", lhs.inner_impl().print()); let rhs = format!("{}", rhs.inner_impl().print()); // lhs and rhs are formatted as HTML, which may be unnecessary compare_names(&lhs, &rhs) } fn item_trait(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, t: &clean::Trait, cache: &Cache) { let bounds = bounds(&t.bounds, false); let types = t.items.iter().filter(|m| m.is_associated_type()).collect::<Vec<_>>(); let consts = t.items.iter().filter(|m| m.is_associated_const()).collect::<Vec<_>>(); let required = t.items.iter().filter(|m| m.is_ty_method()).collect::<Vec<_>>(); let provided = t.items.iter().filter(|m| m.is_method()).collect::<Vec<_>>(); // Output the trait definition wrap_into_docblock(w, |w| { write!(w, "<pre class=\"rust trait\">"); render_attributes(w, it, true); write!( w, "{}{}{}trait {}{}{}", it.visibility.print_with_space(cx.tcx(), it.def_id), t.unsafety.print_with_space(), if t.is_auto { "auto " } else { "" }, it.name.as_ref().unwrap(), t.generics.print(), bounds ); if !t.generics.where_predicates.is_empty() { write!(w, "{}", WhereClause { gens: &t.generics, indent: 0, end_newline: true }); } else { write!(w, " "); } if t.items.is_empty() { write!(w, "{{ }}"); } else { // FIXME: we should be using a derived_id for the Anchors here write!(w, "{{\n"); for t in &types { render_assoc_item(w, t, AssocItemLink::Anchor(None), ItemType::Trait, cx); write!(w, ";\n"); } if !types.is_empty() && !consts.is_empty() { w.write_str("\n"); } for t in &consts { render_assoc_item(w, t, AssocItemLink::Anchor(None), ItemType::Trait, cx); write!(w, ";\n"); } if !consts.is_empty() && !required.is_empty() { w.write_str("\n"); } for (pos, m) in required.iter().enumerate() { render_assoc_item(w, m, AssocItemLink::Anchor(None), ItemType::Trait, cx); write!(w, ";\n"); if pos < required.len()
- 1 { write!(w, "<div class=\"item-spacer\"></div>"); } } if !required.is_empty() && !provided.is_empty() { w.write_str("\n"); } for (pos, m) in provided.iter().enumerate() { render_assoc_item(w, m, AssocItemLink::Anchor(None), ItemType::Trait, cx); match *m.kind { clean::MethodItem(ref inner, _) if !inner.generics.where_predicates.is_empty() => { write!(w, ",\n {{ ... }}\n"); } _ => { write!(w, " {{ ... }}\n"); } } if pos < provided.len() - 1 { write!(w, "<div class=\"item-spacer\"></div>"); } } write!(w, "}}"); } write!(w, "</pre>") }); // Trait documentation document(w, cx, it, None); fn write_small_section_header(w: &mut Buffer, id: &str, title: &str, extra_content: &str) { write!( w, "<h2 id=\"{0}\" class=\"small-section-header\">\ {1}<a href=\"#{0}\" class=\"anchor\"></a>\ </h2>{2}", id, title, extra_content ) } fn write_loading_content(w: &mut Buffer, extra_content: &str) { write!(w, "{}<span class=\"loading-content\">Loading content...</span>", extra_content) } fn trait_item( w: &mut Buffer, cx: &Context<'_>, m: &clean::Item, t: &clean::Item, cache: &Cache, ) { let name = m.name.as_ref().unwrap(); info!("Documenting {} on {:?}", name, t.name); let item_type = m.type_(); let id = cx.derive_id(format!("{}.{}", item_type, name)); write!(w, "<h3 id=\"{id}\" class=\"method\"><code>", id = id,); render_assoc_item(w, m, AssocItemLink::Anchor(Some(&id)), ItemType::Impl, cx); write!(w, "</code>"); render_stability_since(w, m, t, cx.tcx()); write_srclink(cx, m, w, cache); write!(w, "</h3>"); document(w, cx, m, Some(t)); } if !types.is_empty() {
!consts.is_empty() { write_small_section_header( w, "associated-const", "Associated Constants", "<div class=\"methods\">", ); for t in consts { trait_item(w, cx, t, it, cache); } write_loading_content(w, "</div>"); } // Output the documentation for each function individually if !required.is_empty() { write_small_section_header( w, "required-methods", "Required methods", "<div class=\"methods\">", ); for m in required { trait_item(w, cx, m, it, cache); } write_loading_content(w, "</div>"); } if !provided.is_empty() { write_small_section_header( w, "provided-methods", "Provided methods", "<div class=\"methods\">", ); for m in provided { trait_item(w, cx, m, it, cache); } write_loading_content(w, "</div>"); } // If there are methods directly on this trait object, render them here. render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All, cache); if let Some(implementors) = cache.implementors.get(&it.def_id) { // The DefId is for the first Type found with that name. The bool is // if any Types with the same name but different DefId have been found. let mut implementor_dups: FxHashMap<Symbol, (DefId, bool)> = FxHashMap::default(); for implementor in implementors { match implementor.inner_impl().for_ { clean::ResolvedPath { ref path, did, is_generic: false, .. } | clean::BorrowedRef { type_: box clean::ResolvedPath { ref path, did, is_generic: false, .. }, .. } => { let &mut (prev_did, ref mut has_duplicates) = implementor_dups.entry(path.last()).or_insert((did, false)); if prev_did != did { *has_duplicates = true; } } _ => {} } } let (local, foreign) = implementors.iter().partition::<Vec<_>, _>(|i| { i.inner_impl().for_.def_id().map_or(true, |d| cache.paths.contains_key(&d)) }); let (mut synthetic, mut concrete): (Vec<&&Impl>, Vec<&&Impl>) = local.iter().partition(|i| i.inner_impl().synthetic); synthetic.sort_by(compare_impl); concrete.sort_by(compare_impl); if !foreign.is_empty() { write_small_section_header(w, "foreign-impls", "Implementations on Foreign Types", ""); for implementor in foreign { let assoc_link = AssocItemLink::GotoSource( implementor.impl_item.def_id, &implementor.inner_impl().provided_trait_methods, ); render_impl( w, cx, &implementor, it, assoc_link, RenderMode::Normal, implementor.impl_item.stable_since(cx.tcx()).as_deref(), implementor.impl_item.const_stable_since(cx.tcx()).as_deref(), false, None, true, false, &[], cache, ); } write_loading_content(w, ""); } write_small_section_header( w, "implementors", "Implementors", "<div class=\"item-list\" id=\"implementors-list\">", ); for implementor in concrete { render_implementor(cx, implementor, it, w, &implementor_dups, &[], cache); } write_loading_content(w, "</div>"); if t.is_auto { write_small_section_header( w, "synthetic-implementors", "Auto implementors", "<div class=\"item-list\" id=\"synthetic-implementors-list\">", ); for implementor in synthetic { render_implementor( cx, implementor, it, w, &implementor_dups, &collect_paths_for_type(implementor.inner_impl().for_.clone()), cache, ); } write_loading_content(w, "</div>"); } } else { // even without any implementations to write in, we still want the heading and list, so the // implementors javascript file pulled in below has somewhere to write the impls into write_small_section_header( w, "implementors", "Implementors", "<div class=\"item-list\" id=\"implementors-list\">", ); write_loading_content(w, "</div>"); if t.is_auto { write_small_section_header( w, "synthetic-implementors", "Auto implementors", "<div class=\"item-list\" 
id=\"synthetic-implementors-list\">", ); write_loading_content(w, "</div>"); } } write!( w, "<script type=\"text/javascript\" \ src=\"{root_path}/implementors/{path}/{ty}.{name}.js\" async>\ </script>", root_path = vec![".."; cx.current.len()].join("/"), path = if it.def_id.is_local() { cx.current.join("/") } else { let (ref path, _) = cache.external_paths[&it.def_id]; path[..path.len() - 1].join("/") }, ty = it.type_(), name = *it.name.as_ref().unwrap() ); } fn naive_assoc_href(it: &clean::Item, link: AssocItemLink<'_>) -> String { use crate::formats::item_type::ItemType::*; let name = it.name.as_ref().unwrap(); let ty = match it.type_() { Typedef | AssocType => AssocType, s => s, }; let anchor = format!("#{}.{}", ty, name); match link { AssocItemLink::Anchor(Some(ref id)) => format!("#{}", id), AssocItemLink::Anchor(None) => anchor, AssocItemLink::GotoSource(did, _) => { href(did).map(|p| format!("{}{}", p.0, anchor)).unwrap_or(anchor) } } } fn assoc_const( w: &mut Buffer, it: &clean::Item, ty: &clean::Type, _default: Option<&String>, link: AssocItemLink<'_>, extra: &str, cx: &Context<'_>, ) { write!( w, "{}{}const <a href=\"{}\" class=\"constant\"><b>{}</b></a>: {}", extra, it.visibility.print_with_space(cx.tcx(), it.def_id), naive_assoc_href(it, link), it.name.as_ref().unwrap(), ty.print() ); } fn assoc_type( w: &mut Buffer, it: &clean::Item, bounds: &[clean::GenericBound], default: Option<&clean::Type>, link: AssocItemLink<'_>, extra: &str, ) { write!( w, "{}type <a href=\"{}\" class=\"type\">{}</a>", extra, naive_assoc_href(it, link), it.name.as_ref().unwrap() ); if !bounds.is_empty() { write!(w, ": {}", print_generic_bounds(bounds)) } if let Some(default) = default { write!(w, " = {}", default.print()) } } fn render_stability_since_raw( w: &mut Buffer, ver: Option<&str>, const_ver: Option<&str>, containing_ver: Option<&str>, containing_const_ver: Option<&str>, ) { let ver = ver.and_then(|inner| if !inner.is_empty() { Some(inner) } else { None }); let const_ver = const_ver.and_then(|inner| if !inner.is_empty() { Some(inner) } else { None }); if let Some(v) = ver { if let Some(cv) = const_ver { if const_ver != containing_const_ver { write!( w, "<span class=\"since\" title=\"Stable since Rust version {0}, const since {1}\">{0} (const: {1})</span>", v, cv ); } else if ver != containing_ver { write!( w, "<span class=\"since\" title=\"Stable since Rust version {0}\">{0}</span>", v ); } } else { if ver != containing_ver { write!( w, "<span class=\"since\" title=\"Stable since Rust version {0}\">{0}</span>", v ); } } } } fn render_stability_since( w: &mut Buffer, item: &clean::Item, containing_item: &clean::Item, tcx: TyCtxt<'_>, ) { render_stability_since_raw( w, item.stable_since(tcx).as_deref(), item.const_stable_since(tcx).as_deref(), containing_item.stable_since(tcx).as_deref(), containing_item.const_stable_since(tcx).as_deref(), ) } fn render_assoc_item( w: &mut Buffer, item: &clean::Item, link: AssocItemLink<'_>, parent: ItemType, cx: &Context<'_>, ) { fn method( w: &mut Buffer, meth: &clean::Item, header: hir::FnHeader, g: &clean::Generics, d: &clean::FnDecl, link: AssocItemLink<'_>, parent: ItemType, cx: &Context<'_>, ) { let name = meth.name.as_ref().unwrap(); let anchor = format!("#{}.{}", meth.type_(), name); let href = match link { AssocItemLink::Anchor(Some(ref id)) => format!("#{}", id), AssocItemLink::Anchor(None) => anchor, AssocItemLink::GotoSource(did, provided_methods) => { // We're creating a link from an impl-item to the corresponding // trait-item and need to map the 
anchored type accordingly. let ty = if provided_methods.contains(&name) { ItemType::Method } else { ItemType::TyMethod }; href(did).map(|p| format!("{}#{}.{}", p.0, ty, name)).unwrap_or(anchor) } }; let mut header_len = format!( "{}{}{}{}{}{:#}fn {}{:#}", meth.visibility.print_with_space(cx.tcx(), meth.def_id), header.constness.print_with_space(), header.asyncness.print_with_space(), header.unsafety.print_with_space(), print_default_space(meth.is_default()), print_abi_with_space(header.abi), name, g.print() ) .len(); let (indent, end_newline) = if parent == ItemType::Trait { header_len += 4; (4, false) } else { (0, true) }; render_attributes(w, meth, false); write!( w, "{}{}{}{}{}{}{}fn <a href=\"{href}\" class=\"fnname\">{name}</a>\ {generics}{decl}{spotlight}{where_clause}", if parent == ItemType::Trait { " " } else { "" }, meth.visibility.print_with_space(cx.tcx(), meth.def_id), header.constness.print_with_space(), header.asyncness.print_with_space(), header.unsafety.print_with_space(), print_default_space(meth.is_default()), print_abi_with_space(header.abi), href = href, name = name, generics = g.print(), decl = Function { decl: d, header_len, indent, asyncness: header.asyncness }.print(), spotlight = spotlight_decl(&d), where_clause = WhereClause { gens: g, indent, end_newline } ) } match *item.kind { clean::StrippedItem(..) => {} clean::TyMethodItem(ref m) => { method(w, item, m.header, &m.generics, &m.decl, link, parent, cx) } clean::MethodItem(ref m, _) => { method(w, item, m.header, &m.generics, &m.decl, link, parent, cx) } clean::AssocConstItem(ref ty, ref default) => assoc_const( w, item, ty, default.as_ref(), link, if parent == ItemType::Trait { " " } else { "" }, cx, ), clean::AssocTypeItem(ref bounds, ref default) => assoc_type( w, item, bounds, default.as_ref(), link, if parent == ItemType::Trait { " " } else { "" }, ), _ => panic!("render_assoc_item called on non-associated-item"), } } fn item_struct( w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, s: &clean::Struct, cache: &Cache, ) { wrap_into_docblock(w, |w| { write!(w, "<pre class=\"rust struct\">"); render_attributes(w, it, true); render_struct(w, it, Some(&s.generics), s.struct_type, &s.fields, "", true, cx); write!(w, "</pre>") }); document(w, cx, it, None); let mut fields = s .fields .iter() .filter_map(|f| match *f.kind { clean::StructFieldItem(ref ty) => Some((f, ty)), _ => None, }) .peekable(); if let doctree::Plain = s.struct_type { if fields.peek().is_some() { write!( w, "<h2 id=\"fields\" class=\"fields small-section-header\"> Fields{}<a href=\"#fields\" class=\"anchor\"></a></h2>", document_non_exhaustive_header(it) ); document_non_exhaustive(w, it); for (field, ty) in fields { let id = cx.derive_id(format!( "{}.{}", ItemType::StructField, field.name.as_ref().unwrap() )); write!( w, "<span id=\"{id}\" class=\"{item_type} small-section-header\">\ <a href=\"#{id}\" class=\"anchor field\"></a>\ <code>{name}: {ty}</code>\ </span>", item_type = ItemType::StructField, id = id, name = field.name.as_ref().unwrap(), ty = ty.print() ); document(w, cx, field, Some(it)); } } } render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All, cache) } fn item_union(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, s: &clean::Union, cache: &Cache) { wrap_into_docblock(w, |w| { write!(w, "<pre class=\"rust union\">"); render_attributes(w, it, true); render_union(w, it, Some(&s.generics), &s.fields, "", true, cx); write!(w, "</pre>") }); document(w, cx, it, None); let mut fields = s .fields .iter() .filter_map(|f| match 
*f.kind { clean::StructFieldItem(ref ty) => Some((f, ty)), _ => None, }) .peekable(); if fields.peek().is_some() { write!( w, "<h2 id=\"fields\" class=\"fields small-section-header\"> Fields<a href=\"#fields\" class=\"anchor\"></a></h2>" ); for (field, ty) in fields { let name = field.name.as_ref().expect("union field name"); let id = format!("{}.{}", ItemType::StructField, name); write!( w, "<span id=\"{id}\" class=\"{shortty} small-section-header\">\ <a href=\"#{id}\" class=\"anchor field\"></a>\ <code>{name}: {ty}</code>\ </span>", id = id, name = name, shortty = ItemType::StructField, ty = ty.print() ); if let Some(stability_class) = field.stability_class(cx.tcx()) { write!(w, "<span class=\"stab {stab}\"></span>", stab = stability_class); } document(w, cx, field, Some(it)); } } render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All, cache) } fn item_enum(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, e: &clean::Enum, cache: &Cache) { wrap_into_docblock(w, |w| { write!(w, "<pre class=\"rust enum\">"); render_attributes(w, it, true); write!( w, "{}enum {}{}{}", it.visibility.print_with_space(cx.tcx(), it.def_id), it.name.as_ref().unwrap(), e.generics.print(), WhereClause { gens: &e.generics, indent: 0, end_newline: true } ); if e.variants.is_empty() && !e.variants_stripped { write!(w, " {{}}"); } else { write!(w, " {{\n"); for v in &e.variants { write!(w, " "); let name = v.name.as_ref().unwrap(); match *v.kind { clean::VariantItem(ref var) => match var.kind { clean::VariantKind::CLike => write!(w, "{}", name), clean::VariantKind::Tuple(ref tys) => { write!(w, "{}(", name); for (i, ty) in tys.iter().enumerate() { if i > 0 { write!(w, ",&nbsp;") } write!(w, "{}", ty.print()); } write!(w, ")"); } clean::VariantKind::Struct(ref s) => { render_struct(w, v, None, s.struct_type, &s.fields, " ", false, cx); } }, _ => unreachable!(), } write!(w, ",\n"); } if e.variants_stripped { write!(w, " // some variants omitted\n"); } write!(w, "}}"); } write!(w, "</pre>") }); document(w, cx, it, None); if !e.variants.is_empty() { write!( w, "<h2 id=\"variants\" class=\"variants small-section-header\"> Variants{}<a href=\"#variants\" class=\"anchor\"></a></h2>\n", document_non_exhaustive_header(it) ); document_non_exhaustive(w, it); for variant in &e.variants { let id = cx.derive_id(format!("{}.{}", ItemType::Variant, variant.name.as_ref().unwrap())); write!( w, "<div id=\"{id}\" class=\"variant small-section-header\">\ <a href=\"#{id}\" class=\"anchor field\"></a>\ <code>{name}", id = id, name = variant.name.as_ref().unwrap() ); if let clean::VariantItem(ref var) = *variant.kind { if let clean::VariantKind::Tuple(ref tys) = var.kind { write!(w, "("); for (i, ty) in tys.iter().enumerate() { if i > 0 { write!(w, ",&nbsp;"); } write!(w, "{}", ty.print()); } write!(w, ")"); } } write!(w, "</code></div>"); document(w, cx, variant, Some(it)); document_non_exhaustive(w, variant); use crate::clean::{Variant, VariantKind}; if let clean::VariantItem(Variant { kind: VariantKind::Struct(ref s) }) = *variant.kind { let variant_id = cx.derive_id(format!( "{}.{}.fields", ItemType::Variant, variant.name.as_ref().unwrap() )); write!(w, "<div class=\"autohide sub-variant\" id=\"{id}\">", id = variant_id); write!( w, "<h3>Fields of <b>{name}</b></h3><div>", name = variant.name.as_ref().unwrap() ); for field in &s.fields { use crate::clean::StructFieldItem; if let StructFieldItem(ref ty) = *field.kind { let id = cx.derive_id(format!( "variant.{}.field.{}", variant.name.as_ref().unwrap(), 
field.name.as_ref().unwrap() )); write!( w, "<span id=\"{id}\" class=\"variant small-section-header\">\ <a href=\"#{id}\" class=\"anchor field\"></a>\ <code>{f}:&nbsp;{t}</code>\ </span>", id = id, f = field.name.as_ref().unwrap(), t = ty.print() ); document(w, cx, field, Some(variant)); } } write!(w, "</div></div>"); } render_stability_since(w, variant, it, cx.tcx()); } } render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All, cache) } const ALLOWED_ATTRIBUTES: &[Symbol] = &[ sym::export_name, sym::lang, sym::link_section, sym::must_use, sym::no_mangle, sym::repr, sym::non_exhaustive, ]; // The `top` parameter is used when generating the item declaration to ensure it doesn't have a // left padding. For example: // // #[foo] <----- "top" attribute // struct Foo { // #[bar] <---- not "top" attribute // bar: usize, // } fn render_attributes(w: &mut Buffer, it: &clean::Item, top: bool) { let attrs = it .attrs .other_attrs .iter() .filter_map(|attr| { if ALLOWED_ATTRIBUTES.contains(&attr.name_or_empty()) { Some(pprust::attribute_to_string(&attr)) } else { None } }) .join("\n"); if !attrs.is_empty() { write!( w, "<span class=\"docblock attributes{}\">{}</span>", if top { " top-attr" } else { "" }, &attrs ); } } fn render_struct( w: &mut Buffer, it: &clean::Item, g: Option<&clean::Generics>, ty: doctree::StructType, fields: &[clean::Item], tab: &str, structhead: bool, cx: &Context<'_>, ) { write!( w, "{}{}{}", it.visibility.print_with_space(cx.tcx(), it.def_id), if structhead { "struct " } else { "" }, it.name.as_ref().unwrap() ); if let Some(g) = g { write!(w, "{}", g.print()) } match ty { doctree::Plain => { if let Some(g) = g { write!(w, "{}", WhereClause { gens: g, indent: 0, end_newline: true }) } let mut has_visible_fields = false; write!(w, " {{"); for field in fields { if let clean::StructFieldItem(ref ty) = *field.kind { write!( w, "\n{} {}{}: {},", tab, field.visibility.print_with_space(cx.tcx(), field.def_id), field.name.as_ref().unwrap(), ty.print() ); has_visible_fields = true; } } if has_visible_fields { if it.has_stripped_fields().unwrap() { write!(w, "\n{} // some fields omitted", tab); } write!(w, "\n{}", tab); } else if it.has_stripped_fields().unwrap() { // If there are no visible fields we can just display // `{ /* fields omitted */ }` to save space. write!(w, " /* fields omitted */ "); } write!(w, "}}"); } doctree::Tuple => { write!(w, "("); for (i, field) in fields.iter().enumerate() { if i > 0 { write!(w, ", "); } match *field.kind { clean::StrippedItem(box clean::StructFieldItem(..)) => write!(w, "_"), clean::StructFieldItem(ref ty) => { write!( w, "{}{}", field.visibility.print_with_space(cx.tcx(), field.def_id), ty.print() ) } _ => unreachable!(), } } write!(w, ")"); if let Some(g) = g { write!(w, "{}", WhereClause { gens: g, indent: 0, end_newline: false }) } write!(w, ";"); } doctree::Unit => { // Needed for PhantomData. 
if let Some(g) = g { write!(w, "{}", WhereClause { gens: g, indent: 0, end_newline: false }) } write!(w, ";"); } } } fn render_union( w: &mut Buffer, it: &clean::Item, g: Option<&clean::Generics>, fields: &[clean::Item], tab: &str, structhead: bool, cx: &Context<'_>, ) { write!( w, "{}{}{}", it.visibility.print_with_space(cx.tcx(), it.def_id), if structhead { "union " } else { "" }, it.name.as_ref().unwrap() ); if let Some(g) = g { write!(w, "{}", g.print()); write!(w, "{}", WhereClause { gens: g, indent: 0, end_newline: true }); } write!(w, " {{\n{}", tab); for field in fields { if let clean::StructFieldItem(ref ty) = *field.kind { write!( w, " {}{}: {},\n{}", field.visibility.print_with_space(cx.tcx(), field.def_id), field.name.as_ref().unwrap(), ty.print(), tab ); } } if it.has_stripped_fields().unwrap() { write!(w, " // some fields omitted\n{}", tab); } write!(w, "}}"); } #[derive(Copy, Clone)] enum AssocItemLink<'a> { Anchor(Option<&'a str>), GotoSource(DefId, &'a FxHashSet<Symbol>), } impl<'a> AssocItemLink<'a> { fn anchor(&self, id: &'a str) -> Self { match *self { AssocItemLink::Anchor(_) => AssocItemLink::Anchor(Some(&id)), ref other => *other, } } } fn render_assoc_items( w: &mut Buffer, cx: &Context<'_>, containing_item: &clean::Item, it: DefId, what: AssocItemRender<'_>, cache: &Cache, ) { info!("Documenting associated items of {:?}", containing_item.name); let v = match cache.impls.get(&it) { Some(v) => v, None => return, }; let (non_trait, traits): (Vec<_>, _) = v.iter().partition(|i| i.inner_impl().trait_.is_none()); if !non_trait.is_empty() { let render_mode = match what { AssocItemRender::All => { write!( w, "<h2 id=\"implementations\" class=\"small-section-header\">\ Implementations<a href=\"#implementations\" class=\"anchor\"></a>\ </h2>" ); RenderMode::Normal } AssocItemRender::DerefFor { trait_, type_, deref_mut_ } => { write!( w, "<h2 id=\"deref-methods\" class=\"small-section-header\">\ Methods from {}&lt;Target = {}&gt;\ <a href=\"#deref-methods\" class=\"anchor\"></a>\ </h2>", trait_.print(), type_.print() ); RenderMode::ForDeref { mut_: deref_mut_ } } }; for i in &non_trait { render_impl( w, cx, i, containing_item, AssocItemLink::Anchor(None), render_mode, containing_item.stable_since(cx.tcx()).as_deref(), containing_item.const_stable_since(cx.tcx()).as_deref(), true, None, false, true, &[], cache, ); } } if let AssocItemRender::DerefFor { .. 
} = what { return; } if !traits.is_empty() { let deref_impl = traits.iter().find(|t| t.inner_impl().trait_.def_id() == cache.deref_trait_did); if let Some(impl_) = deref_impl { let has_deref_mut = traits.iter().any(|t| t.inner_impl().trait_.def_id() == cache.deref_mut_trait_did); render_deref_methods(w, cx, impl_, containing_item, has_deref_mut, cache); } let (synthetic, concrete): (Vec<&&Impl>, Vec<&&Impl>) = traits.iter().partition(|t| t.inner_impl().synthetic); let (blanket_impl, concrete): (Vec<&&Impl>, _) = concrete.into_iter().partition(|t| t.inner_impl().blanket_impl.is_some()); let mut impls = Buffer::empty_from(&w); render_impls(cx, &mut impls, &concrete, containing_item, cache); let impls = impls.into_inner(); if !impls.is_empty() { write!( w, "<h2 id=\"trait-implementations\" class=\"small-section-header\">\ Trait Implementations<a href=\"#trait-implementations\" class=\"anchor\"></a>\ </h2>\ <div id=\"trait-implementations-list\">{}</div>", impls ); } if !synthetic.is_empty() { write!( w, "<h2 id=\"synthetic-implementations\" class=\"small-section-header\">\ Auto Trait Implementations\ <a href=\"#synthetic-implementations\" class=\"anchor\"></a>\ </h2>\ <div id=\"synthetic-implementations-list\">" ); render_impls(cx, w, &synthetic, containing_item, cache); write!(w, "</div>"); } if !blanket_impl.is_empty() { write!( w, "<h2 id=\"blanket-implementations\" class=\"small-section-header\">\ Blanket Implementations\ <a href=\"#blanket-implementations\" class=\"anchor\"></a>\ </h2>\ <div id=\"blanket-implementations-list\">" ); render_impls(cx, w, &blanket_impl, containing_item, cache); write!(w, "</div>"); } } } fn render_deref_methods( w: &mut Buffer, cx: &Context<'_>, impl_: &Impl, container_item: &clean::Item, deref_mut: bool, cache: &Cache, ) { let deref_type = impl_.inner_impl().trait_.as_ref().unwrap(); let (target, real_target) = impl_ .inner_impl() .items .iter() .find_map(|item| match *item.kind { clean::TypedefItem(ref t, true) => Some(match *t { clean::Typedef { item_type: Some(ref type_), .. } => (type_, &t.type_), _ => (&t.type_, &t.type_), }), _ => None, }) .expect("Expected associated type binding"); let what = AssocItemRender::DerefFor { trait_: deref_type, type_: real_target, deref_mut_: deref_mut }; if let Some(did) = target.def_id() { render_assoc_items(w, cx, container_item, did, what, cache); } else { if let Some(prim) = target.primitive_type() { if let Some(&did) = cache.primitive_locations.get(&prim) { render_assoc_items(w, cx, container_item, did, what, cache); } } } } fn should_render_item(item: &clean::Item, deref_mut_: bool) -> bool { let self_type_opt = match *item.kind { clean::MethodItem(ref method, _) => method.decl.self_type(), clean::TyMethodItem(ref method) => method.decl.self_type(), _ => None, }; if let Some(self_ty) = self_type_opt { let (by_mut_ref, by_box, by_value) = match self_ty { SelfTy::SelfBorrowed(_, mutability) | SelfTy::SelfExplicit(clean::BorrowedRef { mutability, .. }) => { (mutability == Mutability::Mut, false, false) } SelfTy::SelfExplicit(clean::ResolvedPath { did, .. 
}) => { (false, Some(did) == cache().owned_box_did, false) } SelfTy::SelfValue => (false, false, true), _ => (false, false, false), }; (deref_mut_ || !by_mut_ref) && !by_box && !by_value } else { false } } fn spotlight_decl(decl: &clean::FnDecl) -> String { let mut out = Buffer::html(); let mut trait_ = String::new(); if let Some(did) = decl.output.def_id() { let c = cache(); if let Some(impls) = c.impls.get(&did) { for i in impls { let impl_ = i.inner_impl(); if impl_.trait_.def_id().map_or(false, |d| c.traits[&d].is_spotlight) { if out.is_empty() { out.push_str(&format!( "<h3 class=\"notable\">Notable traits for {}</h3>\ <code class=\"content\">", impl_.for_.print() )); trait_.push_str(&impl_.for_.print().to_string()); } //use the "where" class here to make it small out.push_str(&format!( "<span class=\"where fmt-newline\">{}</span>", impl_.print() )); let t_did = impl_.trait_.def_id().unwrap(); for it in &impl_.items { if let clean::TypedefItem(ref tydef, _) = *it.kind { out.push_str("<span class=\"where fmt-newline\"> "); assoc_type( &mut out, it, &[], Some(&tydef.type_), AssocItemLink::GotoSource(t_did, &FxHashSet::default()), "", ); out.push_str(";</span>"); } } } } } } if !out.is_empty() { out.insert_str( 0, "<span class=\"notable-traits\"><span class=\"notable-traits-tooltip\">ⓘ\ <div class=\"notable-traits-tooltiptext\"><span class=\"docblock\">", ); out.push_str("</code></span></div></span></span>"); } out.into_inner() } fn render_impl( w: &mut Buffer, cx: &Context<'_>, i: &Impl, parent: &clean::Item, link: AssocItemLink<'_>, render_mode: RenderMode, outer_version: Option<&str>, outer_const_version: Option<&str>, show_def_docs: bool, use_absolute: Option<bool>, is_on_foreign_type: bool, show_default_items: bool, // This argument is used to reference same type with different paths to avoid duplication // in documentation pages for trait with automatic implementations like "Send" and "Sync". 
aliases: &[String], cache: &Cache, ) { let traits = &cache.traits; let trait_ = i.trait_did().map(|did| &traits[&did]); if render_mode == RenderMode::Normal { let id = cx.derive_id(match i.inner_impl().trait_ { Some(ref t) => { if is_on_foreign_type { get_id_for_impl_on_foreign_type(&i.inner_impl().for_, t) } else { format!("impl-{}", small_url_encode(&format!("{:#}", t.print()))) } } None => "impl".to_string(), }); let aliases = if aliases.is_empty() { String::new() } else { format!(" aliases=\"{}\"", aliases.join(",")) }; if let Some(use_absolute) = use_absolute { write!(w, "<h3 id=\"{}\" class=\"impl\"{}><code class=\"in-band\">", id, aliases); fmt_impl_for_trait_page(&i.inner_impl(), w, use_absolute); if show_def_docs { for it in &i.inner_impl().items { if let clean::TypedefItem(ref tydef, _) = *it.kind { write!(w, "<span class=\"where fmt-newline\"> "); assoc_type(w, it, &[], Some(&tydef.type_), AssocItemLink::Anchor(None), ""); write!(w, ";</span>"); } } } write!(w, "</code>"); } else { write!( w, "<h3 id=\"{}\" class=\"impl\"{}><code class=\"in-band\">{}</code>", id, aliases, i.inner_impl().print() ); } write!(w, "<a href=\"#{}\" class=\"anchor\"></a>", id); render_stability_since_raw( w, i.impl_item.stable_since(cx.tcx()).as_deref(), i.impl_item.const_stable_since(cx.tcx()).as_deref(), outer_version, outer_const_version, ); write_srclink(cx, &i.impl_item, w, cache); write!(w, "</h3>"); if trait_.is_some() { if let Some(portability) = portability(&i.impl_item, Some(parent)) { write!(w, "<div class=\"item-info\">{}</div>", portability); } } if let Some(ref dox) = cx.shared.maybe_collapsed_doc_value(&i.impl_item) { let mut ids = cx.id_map.borrow_mut(); write!( w, "<div class=\"docblock\">{}</div>", Markdown( &*dox, &i.impl_item.links(), &mut ids, cx.shared.codes, cx.shared.edition, &cx.shared.playground ) .into_string() ); } } fn doc_impl_item( w: &mut Buffer, cx: &Context<'_>, item: &clean::Item, parent: &clean::Item, link: AssocItemLink<'_>, render_mode: RenderMode, is_default_item: bool, outer_version: Option<&str>, outer_const_version: Option<&str>, trait_: Option<&clean::Trait>, show_def_docs: bool, cache: &Cache, ) { let item_type = item.type_(); let name = item.name.as_ref().unwrap(); let render_method_item = match render_mode { RenderMode::Normal => true, RenderMode::ForDeref { mut_: deref_mut_ } => should_render_item(&item, deref_mut_), }; let (is_hidden, extra_class) = if (trait_.is_none() || item.doc_value().is_some() || item.kind.is_type_alias()) && !is_default_item { (false, "") } else { (true, " hidden") }; match *item.kind { clean::MethodItem(..) 
| clean::TyMethodItem(_) => { // Only render when the method is not static or we allow static methods if render_method_item { let id = cx.derive_id(format!("{}.{}", item_type, name)); write!(w, "<h4 id=\"{}\" class=\"{}{}\">", id, item_type, extra_class); write!(w, "<code>"); render_assoc_item(w, item, link.anchor(&id), ItemType::Impl, cx); write!(w, "</code>"); render_stability_since_raw( w, item.stable_since(cx.tcx()).as_deref(), item.const_stable_since(cx.tcx()).as_deref(), outer_version, outer_const_version, ); write_srclink(cx, item, w, cache); write!(w, "</h4>"); } } clean::TypedefItem(ref tydef, _) => { let id = cx.derive_id(format!("{}.{}", ItemType::AssocType, name)); write!(w, "<h4 id=\"{}\" class=\"{}{}\"><code>", id, item_type, extra_class); assoc_type(w, item, &Vec::new(), Some(&tydef.type_), link.anchor(&id), ""); write!(w, "</code></h4>"); } clean::AssocConstItem(ref ty, ref default) => { let id = cx.derive_id(format!("{}.{}", item_type, name)); write!(w, "<h4 id=\"{}\" class=\"{}{}\"><code>", id, item_type, extra_class); assoc_const(w, item, ty, default.as_ref(), link.anchor(&id), "", cx); write!(w, "</code>"); render_stability_since_raw( w, item.stable_since(cx.tcx()).as_deref(), item.const_stable_since(cx.tcx()).as_deref(), outer_version, outer_const_version, ); write_srclink(cx, item, w, cache); write!(w, "</h4>"); } clean::AssocTypeItem(ref bounds, ref default) => { let id = cx.derive_id(format!("{}.{}", item_type, name)); write!(w, "<h4 id=\"{}\" class=\"{}{}\"><code>", id, item_type, extra_class); assoc_type(w, item, bounds, default.as_ref(), link.anchor(&id), ""); write!(w, "</code></h4>"); } clean::StrippedItem(..) => return, _ => panic!("can't make docs for trait item with name {:?}", item.name), } if render_method_item { if !is_default_item { if let Some(t) = trait_ { // The trait item may have been stripped so we might not // find any documentation or stability for it. if let Some(it) = t.items.iter().find(|i| i.name == item.name) { // We need the stability of the item from the trait // because impls can't have a stability. if item.doc_value().is_some() { document_item_info(w, cx, it, is_hidden, Some(parent)); document_full(w, item, cx, "", is_hidden); } else { // In case the item isn't documented, // provide short documentation from the trait. 
document_short( w, it, cx, link, "", is_hidden, Some(parent), show_def_docs, ); } } } else { document_item_info(w, cx, item, is_hidden, Some(parent)); if show_def_docs { document_full(w, item, cx, "", is_hidden); } } } else { document_short(w, item, cx, link, "", is_hidden, Some(parent), show_def_docs); } } } write!(w, "<div class=\"impl-items\">"); for trait_item in &i.inner_impl().items { doc_impl_item( w, cx, trait_item, if trait_.is_some() { &i.impl_item } else { parent }, link, render_mode, false, outer_version, outer_const_version, trait_, show_def_docs, cache, ); } fn render_default_items( w: &mut Buffer, cx: &Context<'_>, t: &clean::Trait, i: &clean::Impl, parent: &clean::Item, render_mode: RenderMode, outer_version: Option<&str>, outer_const_version: Option<&str>, show_def_docs: bool, cache: &Cache, ) { for trait_item in &t.items { let n = trait_item.name; if i.items.iter().any(|m| m.name == n) { continue; } let did = i.trait_.as_ref().unwrap().def_id().unwrap(); let assoc_link = AssocItemLink::GotoSource(did, &i.provided_trait_methods); doc_impl_item( w, cx, trait_item, parent, assoc_link, render_mode, true, outer_version, outer_const_version, None, show_def_docs, cache, ); } } // If we've implemented a trait, then also emit documentation for all // default items which weren't overridden in the implementation block. // We don't emit documentation for default items if they appear in the // Implementations on Foreign Types or Implementors sections. if show_default_items { if let Some(t) = trait_ { render_default_items( w, cx, t, &i.inner_impl(), &i.impl_item, render_mode, outer_version, outer_const_version, show_def_docs, cache, ); } } write!(w, "</div>"); } fn item_opaque_ty( w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, t: &clean::OpaqueTy, cache: &Cache, ) { write!(w, "<pre class=\"rust opaque\">"); render_attributes(w, it, false); write!( w, "type {}{}{where_clause} = impl {bounds};</pre>", it.name.as_ref().unwrap(), t.generics.print(), where_clause = WhereClause { gens: &t.generics, indent: 0, end_newline: true }, bounds = bounds(&t.bounds, false) ); document(w, cx, it, None); // Render any items associated directly to this alias, as otherwise they // won't be visible anywhere in the docs. It would be nice to also show // associated items from the aliased type (see discussion in #32077), but // we need #14072 to make sense of the generics. render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All, cache) } fn item_trait_alias( w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, t: &clean::TraitAlias, cache: &Cache, ) { write!(w, "<pre class=\"rust trait-alias\">"); render_attributes(w, it, false); write!( w, "trait {}{}{} = {};</pre>", it.name.as_ref().unwrap(), t.generics.print(), WhereClause { gens: &t.generics, indent: 0, end_newline: true }, bounds(&t.bounds, true) ); document(w, cx, it, None); // Render any items associated directly to this alias, as otherwise they // won't be visible anywhere in the docs. It would be nice to also show // associated items from the aliased type (see discussion in #32077), but // we need #14072 to make sense of the generics. 
render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All, cache) } fn item_typedef( w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, t: &clean::Typedef, cache: &Cache, ) { write!(w, "<pre class=\"rust typedef\">"); render_attributes(w, it, false); write!( w, "type {}{}{where_clause} = {type_};</pre>", it.name.as_ref().unwrap(), t.generics.print(), where_clause = WhereClause { gens: &t.generics, indent: 0, end_newline: true }, type_ = t.type_.print() ); document(w, cx, it, None); // Render any items associated directly to this alias, as otherwise they // won't be visible anywhere in the docs. It would be nice to also show // associated items from the aliased type (see discussion in #32077), but // we need #14072 to make sense of the generics. render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All, cache) } fn item_foreign_type(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, cache: &Cache) { writeln!(w, "<pre class=\"rust foreigntype\">extern {{"); render_attributes(w, it, false); write!( w, " {}type {};\n}}</pre>", it.visibility.print_with_space(cx.tcx(), it.def_id), it.name.as_ref().unwrap(), ); document(w, cx, it, None); render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All, cache) } fn print_sidebar(cx: &Context<'_>, it: &clean::Item, buffer: &mut Buffer, cache: &Cache) { let parentlen = cx.current.len() - if it.is_mod() { 1 } else { 0 }; if it.is_struct() || it.is_trait() || it.is_primitive() || it.is_union() || it.is_enum() || it.is_mod() || it.is_typedef() { write!( buffer, "<p class=\"location\">{}{}</p>", match *it.kind { clean::StructItem(..) => "Struct ", clean::TraitItem(..) => "Trait ", clean::PrimitiveItem(..) => "Primitive Type ", clean::UnionItem(..) => "Union ", clean::EnumItem(..) => "Enum ", clean::TypedefItem(..) => "Type Definition ", clean::ForeignTypeItem => "Foreign Type ", clean::ModuleItem(..) => if it.is_crate() { "Crate " } else { "Module " }, _ => "", }, it.name.as_ref().unwrap() ); } if it.is_crate() { if let Some(ref version) = cache.crate_version { write!( buffer, "<div class=\"block version\">\ <p>Version {}</p>\ </div>", Escape(version) ); } } write!(buffer, "<div class=\"sidebar-elems\">"); if it.is_crate() { write!( buffer, "<a id=\"all-types\" href=\"all.html\"><p>See all {}'s items</p></a>", it.name.as_ref().expect("crates always have a name") ); } match *it.kind { clean::StructItem(ref s) => sidebar_struct(buffer, it, s), clean::TraitItem(ref t) => sidebar_trait(buffer, it, t), clean::PrimitiveItem(_) => sidebar_primitive(buffer, it), clean::UnionItem(ref u) => sidebar_union(buffer, it, u), clean::EnumItem(ref e) => sidebar_enum(buffer, it, e), clean::TypedefItem(_, _) => sidebar_typedef(buffer, it), clean::ModuleItem(ref m) => sidebar_module(buffer, &m.items), clean::ForeignTypeItem => sidebar_foreign_type(buffer, it), _ => (), } // The sidebar is designed to display sibling functions, modules and // other miscellaneous information. since there are lots of sibling // items (and that causes quadratic growth in large modules), // we refactor common parts into a shared JavaScript file per module. // still, we don't move everything into JS because we want to preserve // as much HTML as possible in order to allow non-JS-enabled browsers // to navigate the documentation (though slightly inefficiently). 
write!(buffer, "<p class=\"location\">"); for (i, name) in cx.current.iter().take(parentlen).enumerate() { if i > 0 { write!(buffer, "::<wbr>"); } write!( buffer, "<a href=\"{}index.html\">{}</a>", &cx.root_path()[..(cx.current.len() - i - 1) * 3], *name ); } write!(buffer, "</p>"); // Sidebar refers to the enclosing module, not this module. let relpath = if it.is_mod() { "../" } else { "" }; write!( buffer, "<script>window.sidebarCurrent = {{\ name: \"{name}\", \ ty: \"{ty}\", \ relpath: \"{path}\"\ }};</script>", name = it.name.unwrap_or(kw::Empty), ty = it.type_(), path = relpath ); if parentlen == 0 { // There is no sidebar-items.js beyond the crate root path // FIXME maybe dynamic crate loading can be merged here } else { write!(buffer, "<script defer src=\"{path}sidebar-items.js\"></script>", path = relpath); } // Closes sidebar-elems div. write!(buffer, "</div>"); } fn get_next_url(used_links: &mut FxHashSet<String>, url: String) -> String { if used_links.insert(url.clone()) { return url; } let mut add = 1; while !used_links.insert(format!("{}-{}", url, add)) { add += 1; } format!("{}-{}", url, add) } fn get_methods( i: &clean::Impl, for_deref: bool, used_links: &mut FxHashSet<String>, deref_mut: bool, ) -> Vec<String> { i.items .iter() .filter_map(|item| match item.name { Some(ref name) if !name.is_empty() && item.is_method() => { if !for_deref || should_render_item(item, deref_mut) { Some(format!( "<a href=\"#{}\">{}</a>", get_next_url(used_links, format!("method.{}", name)), name )) } else { None } } _ => None, }) .collect::<Vec<_>>() } // The point is to url encode any potential character from a type with genericity. fn small_url_encode(s: &str) -> String { s.replace("<", "%3C") .replace(">", "%3E") .replace(" ", "%20") .replace("?", "%3F") .replace("'", "%27") .replace("&", "%26") .replace(",", "%2C") .replace(":", "%3A") .replace(";", "%3B") .replace("[", "%5B") .replace("]", "%5D") .replace("\"", "%22") } fn sidebar_assoc_items(it: &clean::Item) -> String { let mut out = String::new(); let c = cache(); if let Some(v) = c.impls.get(&it.def_id) { let mut used_links = FxHashSet::default(); { let used_links_bor = &mut used_links; let mut ret = v .iter() .filter(|i| i.inner_impl().trait_.is_none()) .flat_map(move |i| get_methods(i.inner_impl(), false, used_links_bor, false)) .collect::<Vec<_>>(); if !ret.is_empty() { // We want links' order to be reproducible so we don't use unstable sort. ret.sort(); out.push_str(&format!( "<a class=\"sidebar-title\" href=\"#implementations\">Methods</a>\ <div class=\"sidebar-links\">{}</div>", ret.join("") )); } } if v.iter().any(|i| i.inner_impl().trait_.is_some()) { if let Some(impl_) = v .iter() .filter(|i| i.inner_impl().trait_.is_some()) .find(|i| i.inner_impl().trait_.def_id() == c.deref_trait_did) { debug!("found Deref: {:?}", impl_); if let Some((target, real_target)) = impl_.inner_impl().items.iter().find_map(|item| match *item.kind { clean::TypedefItem(ref t, true) => Some(match *t { clean::Typedef { item_type: Some(ref type_), .. 
} => (type_, &t.type_), _ => (&t.type_, &t.type_), }), _ => None, }) { debug!("found target, real_target: {:?} {:?}", target, real_target); let deref_mut = v .iter() .filter(|i| i.inner_impl().trait_.is_some()) .any(|i| i.inner_impl().trait_.def_id() == c.deref_mut_trait_did); let inner_impl = target .def_id() .or_else(|| { target .primitive_type() .and_then(|prim| c.primitive_locations.get(&prim).cloned()) }) .and_then(|did| c.impls.get(&did)); if let Some(impls) = inner_impl { debug!("found inner_impl: {:?}", impls); out.push_str("<a class=\"sidebar-title\" href=\"#deref-methods\">"); out.push_str(&format!( "Methods from {}&lt;Target={}&gt;", Escape(&format!( "{:#}", impl_.inner_impl().trait_.as_ref().unwrap().print() )), Escape(&format!("{:#}", real_target.print())) )); out.push_str("</a>"); let mut ret = impls .iter() .filter(|i| i.inner_impl().trait_.is_none()) .flat_map(|i| { get_methods(i.inner_impl(), true, &mut used_links, deref_mut) }) .collect::<Vec<_>>(); // We want links' order to be reproducible so we don't use unstable sort. ret.sort(); if !ret.is_empty() { out.push_str(&format!( "<div class=\"sidebar-links\">{}</div>", ret.join("") )); } } } } let format_impls = |impls: Vec<&Impl>| { let mut links = FxHashSet::default(); let mut ret = impls .iter() .filter_map(|i| { let is_negative_impl = is_negative_impl(i.inner_impl()); if let Some(ref i) = i.inner_impl().trait_ { let i_display = format!("{:#}", i.print()); let out = Escape(&i_display); let encoded = small_url_encode(&format!("{:#}", i.print())); let generated = format!( "<a href=\"#impl-{}\">{}{}</a>", encoded, if is_negative_impl { "!" } else { "" }, out ); if links.insert(generated.clone()) { Some(generated) } else { None } } else { None } }) .collect::<Vec<String>>(); ret.sort(); ret.join("") }; let (synthetic, concrete): (Vec<&Impl>, Vec<&Impl>) = v.iter().partition::<Vec<_>, _>(|i| i.inner_impl().synthetic); let (blanket_impl, concrete): (Vec<&Impl>, Vec<&Impl>) = concrete .into_iter() .partition::<Vec<_>, _>(|i| i.inner_impl().blanket_impl.is_some()); let concrete_format = format_impls(concrete); let synthetic_format = format_impls(synthetic); let blanket_format = format_impls(blanket_impl); if !concrete_format.is_empty() { out.push_str( "<a class=\"sidebar-title\" href=\"#trait-implementations\">\ Trait Implementations</a>", ); out.push_str(&format!("<div class=\"sidebar-links\">{}</div>", concrete_format)); } if !synthetic_format.is_empty() { out.push_str( "<a class=\"sidebar-title\" href=\"#synthetic-implementations\">\ Auto Trait Implementations</a>", ); out.push_str(&format!("<div class=\"sidebar-links\">{}</div>", synthetic_format)); } if !blanket_format.is_empty() { out.push_str( "<a class=\"sidebar-title\" href=\"#blanket-implementations\">\ Blanket Implementations</a>", ); out.push_str(&format!("<div class=\"sidebar-links\">{}</div>", blanket_format)); } } } out } fn sidebar_struct(buf: &mut Buffer, it: &clean::Item, s: &clean::Struct) { let mut sidebar = String::new(); let fields = get_struct_fields_name(&s.fields); if !fields.is_empty() { if let doctree::Plain = s.struct_type { sidebar.push_str(&format!( "<a class=\"sidebar-title\" href=\"#fields\">Fields</a>\ <div class=\"sidebar-links\">{}</div>", fields )); } } sidebar.push_str(&sidebar_assoc_items(it)); if !sidebar.is_empty() { write!(buf, "<div class=\"block items\">{}</div>", sidebar); } } fn get_id_for_impl_on_foreign_type(for_: &clean::Type, trait_: &clean::Type) -> String { small_url_encode(&format!("impl-{:#}-for-{:#}", trait_.print(), 
for_.print())) } fn extract_for_impl_name(item: &clean::Item) -> Option<(String, String)> { match *item.kind { clean::ItemKind::ImplItem(ref i) => { if let Some(ref trait_) = i.trait_ { Some(( format!("{:#}", i.for_.print()), get_id_for_impl_on_foreign_type(&i.for_, trait_), )) } else { None } } _ => None, } } fn is_negative_impl(i: &clean::Impl) -> bool { i.polarity == Some(clean::ImplPolarity::Negative) } fn sidebar_trait(buf: &mut Buffer, it: &clean::Item, t: &clean::Trait) { let mut sidebar = String::new(); let mut types = t .items .iter() .filter_map(|m| match m.name { Some(ref name) if m.is_associated_type() => { Some(format!("<a href=\"#associatedtype.{name}\">{name}</a>", name = name)) } _ => None, }) .collect::<Vec<_>>(); let mut consts = t .items .iter() .filter_map(|m| match m.name { Some(ref name) if m.is_associated_const() => { Some(format!("<a href=\"#associatedconstant.{name}\">{name}</a>", name = name)) } _ => None, }) .collect::<Vec<_>>(); let mut required = t .items .iter() .filter_map(|m| match m.name { Some(ref name) if m.is_ty_method() => { Some(format!("<a href=\"#tymethod.{name}\">{name}</a>", name = name)) } _ => None, }) .collect::<Vec<String>>(); let mut provided = t .items .iter() .filter_map(|m| match m.name { Some(ref name) if m.is_method() => { Some(format!("<a href=\"#method.{0}\">{0}</a>", name)) } _ => None, }) .collect::<Vec<String>>(); if !types.is_empty() { types.sort(); sidebar.push_str(&format!( "<a class=\"sidebar-title\" href=\"#associated-types\">\ Associated Types</a><div class=\"sidebar-links\">{}</div>", types.join("") )); } if !consts.is_empty() { consts.sort(); sidebar.push_str(&format!( "<a class=\"sidebar-title\" href=\"#associated-const\">\ Associated Constants</a><div class=\"sidebar-links\">{}</div>", consts.join("") )); } if !required.is_empty() { required.sort(); sidebar.push_str(&format!( "<a class=\"sidebar-title\" href=\"#required-methods\">\ Required Methods</a><div class=\"sidebar-links\">{}</div>", required.join("") )); } if !provided.is_empty() { provided.sort(); sidebar.push_str(&format!( "<a class=\"sidebar-title\" href=\"#provided-methods\">\ Provided Methods</a><div class=\"sidebar-links\">{}</div>", provided.join("") )); } let c = cache(); if let Some(implementors) = c.implementors.get(&it.def_id) { let mut res = implementors .iter() .filter(|i| i.inner_impl().for_.def_id().map_or(false, |d| !c.paths.contains_key(&d))) .filter_map(|i| extract_for_impl_name(&i.impl_item)) .collect::<Vec<_>>(); if !res.is_empty() { res.sort(); sidebar.push_str(&format!( "<a class=\"sidebar-title\" href=\"#foreign-impls\">\ Implementations on Foreign Types</a>\ <div class=\"sidebar-links\">{}</div>", res.into_iter() .map(|(name, id)| format!("<a href=\"#{}\">{}</a>", id, Escape(&name))) .collect::<Vec<_>>() .join("") )); } } sidebar.push_str(&sidebar_assoc_items(it)); sidebar.push_str("<a class=\"sidebar-title\" href=\"#implementors\">Implementors</a>"); if t.is_auto { sidebar.push_str( "<a class=\"sidebar-title\" \ href=\"#synthetic-implementors\">Auto Implementors</a>", ); } write!(buf, "<div class=\"block items\">{}</div>", sidebar) } fn sidebar_primitive(buf: &mut Buffer, it: &clean::Item) { let sidebar = sidebar_assoc_items(it); if !sidebar.is_empty() { write!(buf, "<div class=\"block items\">{}</div>", sidebar); } } fn sidebar_typedef(buf: &mut Buffer, it: &clean::Item) { let sidebar = sidebar_assoc_items(it); if !sidebar.is_empty() { write!(buf, "<div class=\"block items\">{}</div>", sidebar); } } fn get_struct_fields_name(fields: 
&[clean::Item]) -> String { let mut fields = fields .iter() .filter(|f| matches!(*f.kind, clean::StructFieldItem(..))) .filter_map(|f| match f.name { Some(ref name) => { Some(format!("<a href=\"#structfield.{name}\">{name}</a>", name = name)) } _ => None, }) .collect::<Vec<_>>(); fields.sort(); fields.join("") } fn sidebar_union(buf: &mut Buffer, it: &clean::Item, u: &clean::Union) { let mut sidebar = String::new(); let fields = get_struct_fields_name(&u.fields); if !fields.is_empty() { sidebar.push_str(&format!( "<a class=\"sidebar-title\" href=\"#fields\">Fields</a>\ <div class=\"sidebar-links\">{}</div>", fields )); } sidebar.push_str(&sidebar_assoc_items(it)); if !sidebar.is_empty() { write!(buf, "<div class=\"block items\">{}</div>", sidebar); } } fn sidebar_enum(buf: &mut Buffer, it: &clean::Item, e: &clean::Enum) { let mut sidebar = String::new(); let mut variants = e .variants .iter() .filter_map(|v| match v.name { Some(ref name) => Some(format!("<a href=\"#variant.{name}\">{name}</a>", name = name)), _ => None, }) .collect::<Vec<_>>(); if !variants.is_empty() { variants.sort_unstable(); sidebar.push_str(&format!( "<a class=\"sidebar-title\" href=\"#variants\">Variants</a>\ <div class=\"sidebar-links\">{}</div>", variants.join(""), )); } sidebar.push_str(&sidebar_assoc_items(it)); if !sidebar.is_empty() { write!(buf, "<div class=\"block items\">{}</div>", sidebar); } } fn item_ty_to_strs(ty: &ItemType) -> (&'static str, &'static str) { match *ty { ItemType::ExternCrate | ItemType::Import => ("reexports", "Re-exports"), ItemType::Module => ("modules", "Modules"), ItemType::Struct => ("structs", "Structs"), ItemType::Union => ("unions", "Unions"), ItemType::Enum => ("enums", "Enums"), ItemType::Function => ("functions", "Functions"), ItemType::Typedef => ("types", "Type Definitions"), ItemType::Static => ("statics", "Statics"), ItemType::Constant => ("constants", "Constants"), ItemType::Trait => ("traits", "Traits"), ItemType::Impl => ("impls", "Implementations"), ItemType::TyMethod => ("tymethods", "Type Methods"), ItemType::Method => ("methods", "Methods"), ItemType::StructField => ("fields", "Struct Fields"), ItemType::Variant => ("variants", "Variants"), ItemType::Macro => ("macros", "Macros"), ItemType::Primitive => ("primitives", "Primitive Types"), ItemType::AssocType => ("associated-types", "Associated Types"), ItemType::AssocConst => ("associated-consts", "Associated Constants"), ItemType::ForeignType => ("foreign-types", "Foreign Types"), ItemType::Keyword => ("keywords", "Keywords"), ItemType::OpaqueTy => ("opaque-types", "Opaque Types"), ItemType::ProcAttribute => ("attributes", "Attribute Macros"), ItemType::ProcDerive => ("derives", "Derive Macros"), ItemType::TraitAlias => ("trait-aliases", "Trait aliases"), } } fn sidebar_module(buf: &mut Buffer, items: &[clean::Item]) { let mut sidebar = String::new(); if items.iter().any(|it| { it.type_() == ItemType::ExternCrate || (it.type_() == ItemType::Import && !it.is_stripped()) }) { sidebar.push_str(&format!( "<li><a href=\"#{id}\">{name}</a></li>", id = "reexports", name = "Re-exports" )); } // ordering taken from item_module, reorder, where it prioritized elements in a certain order // to print its headings for &myty in &[ ItemType::Primitive, ItemType::Module, ItemType::Macro, ItemType::Struct, ItemType::Enum, ItemType::Constant, ItemType::Static, ItemType::Trait, ItemType::Function, ItemType::Typedef, ItemType::Union, ItemType::Impl, ItemType::TyMethod, ItemType::Method, ItemType::StructField, ItemType::Variant, 
ItemType::AssocType, ItemType::AssocConst, ItemType::ForeignType, ItemType::Keyword, ] { if items.iter().any(|it| !it.is_stripped() && it.type_() == myty) { let (short, name) = item_ty_to_strs(&myty); sidebar.push_str(&format!( "<li><a href=\"#{id}\">{name}</a></li>", id = short, name = name )); } } if !sidebar.is_empty() { write!(buf, "<div class=\"block items\"><ul>{}</ul></div>", sidebar); } } fn sidebar_foreign_type(buf: &mut Buffer, it: &clean::Item) { let sidebar = sidebar_assoc_items(it); if !sidebar.is_empty() { write!(buf, "<div class=\"block items\">{}</div>", sidebar); } } fn item_macro(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, t: &clean::Macro) { wrap_into_docblock(w, |w| { w.write_str(&highlight::render_with_highlighting( t.source.clone(), Some("macro"), None, None, it.source.span().edition(), )) }); document(w, cx, it, None) } fn item_proc_macro(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, m: &clean::ProcMacro) { let name = it.name.as_ref().expect("proc-macros always have names"); match m.kind { MacroKind::Bang => { write!(w, "<pre class=\"rust macro\">"); write!(w, "{}!() {{ /* proc-macro */ }}", name); write!(w, "</pre>"); } MacroKind::Attr => { write!(w, "<pre class=\"rust attr\">"); write!(w, "#[{}]", name); write!(w, "</pre>"); } MacroKind::Derive => { write!(w, "<pre class=\"rust derive\">"); write!(w, "#[derive({})]", name); if !m.helpers.is_empty() { writeln!(w, "\n{{"); writeln!(w, " // Attributes available to this derive:"); for attr in &m.helpers { writeln!(w, " #[{}]", attr); } write!(w, "}}"); } write!(w, "</pre>"); } } document(w, cx, it, None) } fn item_primitive(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, cache: &Cache) { document(w, cx, it, None); render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All, cache) } fn item_keyword(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item) { document(w, cx, it, None) } crate const BASIC_KEYWORDS: &str = "rust, rustlang, rust-lang"; fn make_item_keywords(it: &clean::Item) -> String { format!("{}, {}", BASIC_KEYWORDS, it.name.as_ref().unwrap()) } /// Returns a list of all paths used in the type. /// This is used to help deduplicate imported impls /// for reexported types. If any of the contained /// types are re-exported, we don't use the corresponding /// entry from the js file, as inlining will have already /// picked up the impl fn collect_paths_for_type(first_ty: clean::Type) -> Vec<String> { let mut out = Vec::new(); let mut visited = FxHashSet::default(); let mut work = VecDeque::new(); let cache = cache(); work.push_back(first_ty); while let Some(ty) = work.pop_front() { if !visited.insert(ty.clone()) { continue; } match ty { clean::Type::ResolvedPath { did, .. } => { let get_extern = || cache.external_paths.get(&did).map(|s| s.0.clone()); let fqp = cache.exact_paths.get(&did).cloned().or_else(get_extern); if let Some(path) = fqp { out.push(path.join("::")); } } clean::Type::Tuple(tys) => { work.extend(tys.into_iter()); } clean::Type::Slice(ty) => { work.push_back(*ty); } clean::Type::Array(ty, _) => { work.push_back(*ty); } clean::Type::RawPointer(_, ty) => { work.push_back(*ty); } clean::Type::BorrowedRef { type_, .. } => { work.push_back(*type_); } clean::Type::QPath { self_type, trait_, .. } => { work.push_back(*self_type); work.push_back(*trait_); } _ => {} } } out }
write_small_section_header( w, "associated-types", "Associated Types", "<div class=\"methods\">", ); for t in types { trait_item(w, cx, t, it, cache); } write_loading_content(w, "</div>"); } if
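// Illustrative sketch (not part of the original rustdoc source): the two pure
// helpers above are easiest to understand through their observable behavior.
// `get_next_url` disambiguates repeated anchors by appending `-1`, `-2`, ...,
// and `small_url_encode` percent-encodes the characters that generics introduce
// into impl ids. The `FxHashSet` import path is assumed from the surrounding crate.
#[cfg(test)]
mod url_helper_sketch {
    use super::{get_next_url, small_url_encode};
    use rustc_data_structures::fx::FxHashSet;

    #[test]
    fn anchors_are_deduplicated() {
        let mut used = FxHashSet::default();
        assert_eq!(get_next_url(&mut used, "method.foo".to_string()), "method.foo");
        assert_eq!(get_next_url(&mut used, "method.foo".to_string()), "method.foo-1");
        assert_eq!(get_next_url(&mut used, "method.foo".to_string()), "method.foo-2");
    }

    #[test]
    fn generics_are_percent_encoded() {
        // `<`, `>`, and spaces are replaced; `=` passes through unchanged.
        assert_eq!(
            small_url_encode("impl-Iterator<Item = u32>"),
            "impl-Iterator%3CItem%20=%20u32%3E"
        );
    }
}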
test_nnet.py
import argparse
import numpy
import pandas as pd

from keras import backend as K
from keras.models import model_from_json

from ngram_classifier import NGramClassifier

CLASS_WEIGHTS = [
    ("num_days", 0.997821848),
    ("statuses_per_day", 1.065570851),
    ("followers_per_day", 1.021055002),
    ("following_per_day", 1.122703153),
    ("desc_len_terms", 1.171072307),
    ("num_list_items", 1.017727903),
    ("num_hashtags", 0.889418197),
    ("url_count", 1.018365516)
]

def get_input_vector(row, classifier):
    '''
    Builds the network's input vector in this order:
    (classifier): p_good
    (classifier): p_bot
    num_days
    statuses_per_day
    followers_per_day
    following_per_day
    desc_len_terms
    num_list_items
    num_hashtags
    url_count
    '''
    class_probs = classifier.classify_text(str(row["user_profile_description"]))
    ret = [class_probs["good"], class_probs["bot"]]
    for label, weight in CLASS_WEIGHTS:
        ret.append(float(row[label]) * weight)
    return ret

def get_training_output(row):
# Batch-level precision/recall/F1 computed with Keras backend ops.
def recall_m(y_true, y_pred):
    true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
    possible_positives = K.sum(K.round(K.clip(y_true, 0, 1)))
    recall = true_positives / (possible_positives + K.epsilon())
    return recall

def precision_m(y_true, y_pred):
    true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
    predicted_positives = K.sum(K.round(K.clip(y_pred, 0, 1)))
    precision = true_positives / (predicted_positives + K.epsilon())
    return precision

def f1_m(y_true, y_pred):
    precision = precision_m(y_true, y_pred)
    recall = recall_m(y_true, y_pred)
    return 2 * ((precision * recall) / (precision + recall + K.epsilon()))

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--input", help="test input csv file")
    parser.add_argument("-m", "--model", help="ngram model file")
    parser.add_argument("-n", "--nnetmodel", help="NNet model file")
    args = parser.parse_args()

    if not args.input:
        raise ValueError("missing input file")
    if not args.model:
        raise ValueError("missing ngram model file")
    if not args.nnetmodel:
        raise ValueError("missing nnet model file")

    classifier = NGramClassifier(model_path=args.model)

    with open(args.nnetmodel, 'r') as json_file:
        loaded_model_json = json_file.read()

    nnet = model_from_json(loaded_model_json)
    nnet.load_weights(f'{args.nnetmodel}.h5')
    nnet.compile(loss='binary_crossentropy', optimizer='adam',
                 metrics=['acc', f1_m, precision_m, recall_m])

    df_test = pd.read_csv(args.input, keep_default_na=False)

    targets_x = []
    targets_y = []

    for index, row in df_test.iterrows():
        input_vector = get_input_vector(row, classifier)
        targets_x.append(input_vector)
        targets_y.append(get_training_output(row))

    loss, accuracy, f1_score, precision, recall = nnet.evaluate(
        numpy.array(targets_x), numpy.array(targets_y), verbose=0)
    print(f'loss: {loss}, acc: {accuracy}, prec: {precision}, recall: {recall}, f1: {f1_score}')
    class_label = str(row["class_value"])
    return 0.0 if class_label == "good" else 1.0
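The file imports precision_recall_fscore_support but never calls it; a plausible use, sketched below, is sanity-checking the hand-rolled Keras metrics against sklearn on a toy batch (the arrays here are made up for illustration):

import numpy
from sklearn.metrics import precision_recall_fscore_support

y_true = numpy.array([1, 0, 1, 1, 0])
y_pred = numpy.array([1, 0, 0, 1, 1])

# average="binary" reports the positive (bot) class, matching f1_m's definition
precision, recall, f1, _ = precision_recall_fscore_support(y_true, y_pred, average="binary")
print(f"prec: {precision}, recall: {recall}, f1: {f1}")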
views.py
import datetime
import math

import requests_cache
from bs4 import BeautifulSoup
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response

from api.modules.holidays.constants import HOLIDAYS_PAGE_URL, HINDI_DAY_STRING_MAP, HINDI_MONTH_STRING_MAP
from api.modules.holidays.utils import load_url_content

week_difference = datetime.timedelta(days=7)
requests_cache.install_cache(expire_after=week_difference)


@api_view(['GET'])
def get_upcoming_holidays(request, year):
""" Returns a list of all the holidays in a given year :param request: :param year: :return: 400 if unable to get response from Holidays Page :return: 503 if unable to correctly parse Holidays Page :return: 200 successful """ holiday_data = [] try: html = load_url_content(HOLIDAYS_PAGE_URL.format(year)) if html: soup = BeautifulSoup(html, 'html.parser') cells = soup.findAll(['th', 'td']) row_len = int(math.ceil(len(cells) / 4)) for ctr in range(row_len): if ctr == 0: continue offset = ctr * 4 holiday_type = cells[offset + 3].text.split() date_string = cells[offset + 0].text.strip().split(" ") day_string = cells[offset + 1].text.strip() # Check if HTML response is in Hindi # If in Hindi, replace with English counterpart if date_string[1] in HINDI_MONTH_STRING_MAP.keys(): date_string[1] = HINDI_MONTH_STRING_MAP[date_string[1]][:3] day_string = HINDI_DAY_STRING_MAP[day_string] try: dt = datetime.datetime.strptime(" ".join(date_string), '%d %b') except ValueError: dt = datetime.datetime.strptime(" ".join(date_string), '%b %d') holiday_obj = { 'month': dt.strftime('%B'), 'date': int(dt.strftime('%d')), 'day': day_string, 'name': cells[offset + 2].text.strip(), 'type': holiday_type[0] } holiday_data.append(holiday_obj) else: return Response(holiday_data, status=status.HTTP_400_BAD_REQUEST) except Exception as e: return Response(str(e), status=status.HTTP_500_INTERNAL_SERVER_ERROR) return Response(holiday_data, status=status.HTTP_200_OK)
SyncSettingsCell.tsx
import React, { useMemo } from "react";
import styled from "styled-components";
import { useIntl } from "react-intl";

import { Cell } from "components/SimpleTableComponents";
import DropDown, { DropDownRow } from "components/DropDown";
import { IDataItem } from "components/DropDown/components/ListItem";
import {
  SyncMode,
  SyncSchemaField,
  SyncSchemaStream,
} from "core/domain/catalog";

const DropDownContainer = styled.div`
  padding-right: 10px;
`;

const StyledDropDown = styled(DropDown)`
  & ~ .rw-popup-container {
    min-width: 260px;
    left: auto;
  }
`;

type IProps = {
  streamNode: SyncSchemaStream;
  fields: SyncSchemaField[];
  onSelect: (data: IDataItem) => void;
};

function
(
  fields: SyncSchemaField[],
  cb: (field: SyncSchemaField) => void
) {
  fields.forEach((field) => {
    cb(field);
    if (field.fields) {
      traverse(field.fields, cb);
    }
  });
}

const SyncSettingsCell: React.FC<IProps> = ({
  streamNode,
  fields,
  onSelect,
}) => {
  const { stream, config } = streamNode;
  const formatMessage = useIntl().formatMessage;

  const fullData = useMemo(() => {
    const syncData: DropDownRow.IDataItem[] = stream.supportedSyncModes
      .filter((mode) => mode !== SyncMode.Incremental)
      .map((mode) => ({
        value: mode,
        text: formatMessage({
          id: `sources.${mode}`,
          defaultMessage: mode,
        }),
      }));

    const isIncrementalSupported = stream.supportedSyncModes.includes(
      SyncMode.Incremental
    );

    // If INCREMENTAL is included in the supported sync modes...
    if (isIncrementalSupported) {
      // If sourceDefinedCursor is true, the dropdown should have just one row for incremental
      if (stream.sourceDefinedCursor) {
        syncData.push({
          text: formatMessage({
            id: "sources.incrementalSourceCursor",
          }),
          value: SyncMode.Incremental,
        });
      } else {
        // If sourceDefinedCursor is false...
        // If defaultCursorField is set, then the field specified in there should be at the top of the list
        // and have the word "(default)" next to it
        if (stream.defaultCursorField?.length) {
          syncData.push({
            text: formatMessage(
              {
                id: "sources.incrementalDefault",
              },
              { value: stream.defaultCursorField[0] }
            ),
            value: stream.defaultCursorField[0],
            secondary: true,
            groupValue: SyncMode.Incremental,
            groupValueText: formatMessage({
              id: "sources.incremental",
            }),
          });
        }

        // Any column of primitive type in the stream can be used as the cursor
        traverse(fields, (field) => {
          if (
            field.type !== "object" &&
            !syncData.some((dataItem) => dataItem.value === field.cleanedName)
          ) {
            syncData.push({
              text: field.cleanedName,
              value: field.cleanedName,
              secondary: true,
              groupValue: SyncMode.Incremental,
              groupValueText: formatMessage({
                id: "sources.incremental",
              }),
            });
          }
        });
      }
    }

    return syncData;
  }, [fields, stream, formatMessage]);

  const currentValue = config.cursorField?.length
    ? stream.sourceDefinedCursor
      ? SyncMode.Incremental
      : config.cursorField[0]
    : config.syncMode || "";

  return (
    <Cell>
      <DropDownContainer>
        <StyledDropDown
          fullText
          hasFilter
          withBorder
          value={currentValue}
          data={fullData}
          onSelect={onSelect}
          groupBy="groupValueText"
          filterPlaceholder={formatMessage({
            id: "sources.searchIncremental",
          })}
        />
      </DropDownContainer>
    </Cell>
  );
};

export default SyncSettingsCell;
traverse
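The cursor-selection rules in the component reduce to a small decision tree. A rough Python rendering of the same rules follows; the dict keys mirror the TSX field names and are not a real API, and a flat field list stands in for the recursive traverse for brevity:

def cursor_options(stream, fields):
    # Non-incremental modes are always offered as-is.
    options = [m for m in stream["supportedSyncModes"] if m != "incremental"]
    if "incremental" in stream["supportedSyncModes"]:
        if stream["sourceDefinedCursor"]:
            # Source picks the cursor: a single incremental row.
            options.append("incremental (source-defined cursor)")
        else:
            # Default cursor field first, then every primitive field, deduplicated.
            if stream.get("defaultCursorField"):
                options.append(stream["defaultCursorField"][0] + " (default)")
            for f in fields:
                if f["type"] != "object" and f["cleanedName"] not in options:
                    options.append(f["cleanedName"])
    return options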
tun.go
/* SPDX-License-Identifier: MIT
 *
 * Copyright (C) 2017-2021 WireGuard LLC. All Rights Reserved.
 */

package device

import (
	"fmt"
	"sync/atomic"

	"github.com/Skyxim/wireguard/tun"
)
func (device *Device) RoutineTUNEventReader() {
	device.log.Verbosef("Routine: event worker - started")

	for event := range device.tun.device.Events() {
		if event&tun.EventMTUUpdate != 0 {
			mtu, err := device.tun.device.MTU()
			if err != nil {
				device.log.Errorf("Failed to load updated MTU of device: %v", err)
				continue
			}
			if mtu < 0 {
				device.log.Errorf("MTU not updated to negative value: %v", mtu)
				continue
			}
			var tooLarge string
			if mtu > MaxContentSize {
				tooLarge = fmt.Sprintf(" (too large, capped at %v)", MaxContentSize)
				mtu = MaxContentSize
			}
			old := atomic.SwapInt32(&device.tun.mtu, int32(mtu))
			if int(old) != mtu {
				device.log.Verbosef("MTU updated: %v%s", mtu, tooLarge)
			}
		}

		if event&tun.EventUp != 0 {
			device.log.Verbosef("Interface up requested")
			device.Up()
		}

		if event&tun.EventDown != 0 {
			device.log.Verbosef("Interface down requested")
			device.Down()
		}
	}

	device.log.Verbosef("Routine: event worker - stopped")
}
const DefaultMTU = 1420
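The MTU handling above follows a reject-negative, cap-at-maximum, swap-and-log-on-change rule. A minimal Python sketch of that rule, with max_content_size standing in for the real MaxContentSize constant:

def apply_mtu_update(current_mtu, new_mtu, max_content_size):
    # Negative values are rejected outright, mirroring the Go routine.
    if new_mtu < 0:
        return current_mtu
    # Values above the packet-size ceiling are capped, not rejected.
    capped = min(new_mtu, max_content_size)
    # Log only when the effective value actually changed.
    if capped != current_mtu:
        print(f"MTU updated: {capped}")
    return capped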
NaiveBayesClassifier.py
from contractionsDict import contractionsDict
import pandas as pd
import time
import numpy as np
import re
from pattern.en import pluralize, singularize
import sys
import csv
from LemmitizationandStemConverter import ObtainStemAndLemmatizationWord

def priorProb(scv):
    pct = 0 #positive count total
    nct = 0 #negative count total
    Nct = 0 #Neutral count total
    ntt = 0 #no. training tweets
    for index, row in scv.items():
        #print(row)
        if(row.lower() == 'positive'):
            pct+=1
        if(row.lower() == 'negative'):
            nct+=1
        if(row.lower() == 'neutral'):
            Nct+=1
        ntt+=1
    pc1 = pct/ntt #Positive Class 1
    nc2 = nct/ntt #Negative Class 2
    nc3 = Nct/ntt #Neutral Class 3
    return((pc1, nc2, nc3))

def removeEmojis(txt):
    emoji_pattern = re.compile(u"[^\U00000000-\U0000d7ff\U0000e000-\U0000ffff]", flags=re.UNICODE)
    return(emoji_pattern.sub(u' ', txt))

def expandContractions(s, contractionsDict=contractionsDict):
    contractionsRe = re.compile('(%s)' % '|'.join(contractionsDict.keys()))
    def
(match):
        return contractionsDict[match.group(0)]
    return contractionsRe.sub(replace, s)

def CleanUp(text):
    #Removes links from tweet:
    text = re.sub('http://\S+|https://\S+', ' ', text)
    #Remove #, _, -, and @ from tweet:
    text = text.replace("#", " ").replace("_", " ").replace("@", " ").replace("-", " ")
    #Replace ? with questionmark and ! with exclaimationmark:
    text = text.replace("?", " questionmark").replace("!", " exclaimationmark")
    #Remove all other non alphanumeric special characters from tweet:
    text = re.sub('\W+ ',' ', text)
    #Removes whitespaces from tweet:
    text = text.replace("\t", " ").replace("\n", " ")
    text = re.sub(r' {2,}' , ' ', text)
    #Removes emojis from tweet:
    text = removeEmojis(text)
    return text

def likelihoodFunctionInformation(txt, ldf):
    tsv = 0 #Total Sentiment Value
    npw = 0 #No. of positive words
    nnw = 0 #No. of negative words
    nNw = 0 #No. of neutral words
    psv = 0 #Previous Word sentiment value
    nac = False #Negative conjunctive adverb check
    wrd = " " #Word to parse
    t3 = time.time()
    for ewt in txt.split():
        #Check for all versions of word in Sentiment Dictionary:
        #print(ewt)
        #t1 = time.time()
        sll = ObtainStemAndLemmatizationWord(ewt) #Obtaining the noun version and root version of word using the function.
        #print(sll)
        if(sll[0]!=ewt):
            if(bool(sll[0] and sll[0].strip())==True): #Checking if the noun part of the word is in the Sentiment Dictionary.
                snw = singularize(sll[0]) #Noun part of word in singular tense.
                pnw = pluralize(sll[0]) #Noun part of word in plural tense.
                srw = singularize(sll[1]) #Root part of word in singular tense.
                prw = pluralize(sll[1]) #Root part of word in plural tense.
                #Check if singular part of noun of word is in the Sentiment Dictionary:
                if((snw in ldf[0].word.values) or (snw in ldf[1].word.values) or (snw in ldf[2].word.values) or (snw in ldf[3].word.values)):
                    wrd = snw
                #Check if plural part of noun of word is in the Sentiment Dictionary:
                elif((pnw in ldf[0].word.values) or (pnw in ldf[1].word.values) or (pnw in ldf[2].word.values) or (pnw in ldf[3].word.values)):
                    wrd = pnw
                #Check if singular part of root of word is in the Sentiment Dictionary:
                elif((srw in ldf[0].word.values) or (srw in ldf[1].word.values) or (srw in ldf[2].word.values) or (srw in ldf[3].word.values)):
                    wrd = srw
                #Check if plural part of root of word is in the Sentiment Dictionary:
                elif((prw in ldf[0].word.values) or (prw in ldf[1].word.values) or (prw in ldf[2].word.values) or (prw in ldf[3].word.values)):
                    wrd = prw
                else:
                    wrd = ewt
            elif(sll[1]!=ewt): #Checking if the root version of the word is in the Sentiment Dictionary.
                srw = singularize(sll[1]) #Root part of word in singular tense.
                prw = pluralize(sll[1]) #Root part of word in plural tense.
                #Check if singular part of root of word is in the Sentiment Dictionary:
                if((srw in ldf[0].word.values) or (srw in ldf[1].word.values) or (srw in ldf[2].word.values) or (srw in ldf[3].word.values)):
                    wrd = srw
                #Check if plural part of root of word is in the Sentiment Dictionary:
                elif((prw in ldf[0].word.values) or (prw in ldf[1].word.values) or (prw in ldf[2].word.values) or (prw in ldf[3].word.values)):
                    wrd = prw
                else:
                    wrd = ewt
            else:
                wrd = ewt
        else:
            wrd = ewt
        wrd = ewt #Run the Likelihood Function Information on the word.
        wsv = 0 #Word Sentiment Value
        sfw = singularize(wrd) #Singular Form of Word
        pfw = pluralize(wrd) #Plural Form of Word
        #print(wrd, tsv) #Very Important Print Statement for Debugging
        #Checking if word matches a negative conjunctive adverb that forms different phrases in the tweet:
        if wrd.lower()=='not' or wrd.lower()=='but' or wrd.lower()=='however' or wrd.lower()=='instead' or wrd.lower()=='otherwise' or wrd.lower()=='contrarily':
            if(nac==False):
                nac=True
            else:
                nac=False
        if(nac==False):
            #Checking if words match special words
            if sfw.lower()=='maga':
                npw += 100
                tsv += 100
            elif sfw.lower()=='makeamericagreatagain':
                npw += 100
                tsv += 100
            elif sfw.lower()=='make america great again':
                npw += 100
                tsv += 100
            elif "email" in sfw.lower():
                nnw += 5
                tsv -= 5
            elif wrd.lower()=='questionmark':
                if(psv>0):
                    nnw += 10
                    tsv -= 10
                if(psv<0):
                    npw += 10
                    tsv += 10
                psv = 0
            elif wrd.lower()=='exclaimationmark':
                if(psv<0):
                    nnw += 10
                    tsv -= 10
                if(psv>0):
                    npw += 10
                    tsv += 10
                psv = 0
            #Checking if word exists in the Sentiment Dictionary. Assign sentiment value and/or category if word exists. Otherwise categorize word as neutral.
            elif sfw.lower() in ldf[0].word.values: #Check if singular version of word is in dataframe1
                wsv = int(ldf[0].iloc[ldf[0]['word'].loc[lambda x: x==sfw.lower()].index.tolist()[0]].sentiment)
                #print(ewt, sfw, 1, wsv, tsv)
                if(wsv>0):
                    npw += 1
                elif(wsv<0):
                    nnw += 1
                tsv += wsv
                psv = wsv
            elif pfw.lower() in ldf[0].word.values: #Check if plural version of word is in dataframe1
                wsv = int(ldf[0].iloc[ldf[0]['word'].loc[lambda x: x==pfw.lower()].index.tolist()[0]].sentiment)
                #print(ewt, pfw, 1, wsv, tsv)
                if(wsv>0):
                    npw += 1
                elif(wsv<0):
                    nnw += 1
                tsv += wsv
                psv = wsv
            elif sfw.lower() in ldf[1].word.values: #Check if singular version of word is in dataframe2
                #print(ewt, sfw, 2)
                wsv = int(ldf[1].iloc[ldf[1]['word'].loc[lambda x: x==sfw.lower()].index.tolist()[0]].sentiment)
                if(wsv>0):
                    npw += 1
                elif(wsv<0):
                    nnw += 1
                tsv += wsv
                psv = wsv
            elif pfw.lower() in ldf[1].word.values: #Check if plural version of word is in dataframe2
                #print(ewt, pfw, 2)
                wsv = int(ldf[1].iloc[ldf[1]['word'].loc[lambda x: x==pfw.lower()].index.tolist()[0]].sentiment)
                if(wsv>0):
                    npw += 1
                elif(wsv<0):
                    nnw += 1
                tsv += wsv
                psv = wsv
            elif sfw.lower() in ldf[2].word.values: #Check if singular version of word is in dataframe3
                #print(ewt, sfw, 3, tsv)
                npw += 1
                psv = 3
            elif pfw.lower() in ldf[2].word.values: #Check if plural version of word is in dataframe3
                #print(ewt, pfw, 3, tsv)
                npw += 1
                psv = 3
            elif sfw.lower() in ldf[3].word.values: #Check if singular version of word is in dataframe4
                #print(ewt, sfw, 4)
                nnw += 1
                psv = -3
            elif pfw.lower() in ldf[3].word.values: #Check if plural version of word is in dataframe4
                #print(ewt, pfw, 4)
                nnw += 1
                psv = -3
            else: #The word must be a "neutral" word
                #print(wrd, sfw, pfw)
                nNw += 1
        else:
            #Checking if words match special words
            if sfw.lower()=='maga':
                npw += 100
                tsv += 100
            elif sfw.lower()=='makeamericagreatagain':
                npw += 100
                tsv += 100
            elif sfw.lower()=='make america great again':
                npw += 100
                tsv += 100
            elif "email" in sfw.lower():
                nnw += 5
                tsv -= 5
            elif wrd.lower()=='questionmark':
                if(psv>0):
                    npw += 10
                    tsv += 10
                if(psv<0):
                    nnw += 10
                    tsv -= 10
                psv = 0
                nac = False
            elif wrd.lower()=='exclaimationmark':
                if(psv<0):
                    npw += 10
                    tsv += 10
                if(psv>0):
                    nnw += 10
                    tsv -= 10
                psv = 0
                nac = False
            #Checking if word exists in the Sentiment Dictionary. Assign sentiment value and/or category if word exists. Otherwise categorize word as neutral.
            elif sfw.lower() in ldf[0].word.values: #Check if singular version of word is in dataframe1
                wsv = int(ldf[0].iloc[ldf[0]['word'].loc[lambda x: x==sfw.lower()].index.tolist()[0]].sentiment)
                #print(sfw, 1, wsv, tsv)
                if(wsv>0):
                    nnw += 1
                elif(wsv<0):
                    npw += 1
                tsv -= wsv
                psv = -wsv
                nac = False
            elif pfw.lower() in ldf[0].word.values: #Check if plural version of word is in dataframe1
                wsv = int(ldf[0].iloc[ldf[0]['word'].loc[lambda x: x==pfw.lower()].index.tolist()[0]].sentiment)
                #print(pfw, 1, wsv, tsv)
                if(wsv>0):
                    nnw += 1
                elif(wsv<0):
                    npw += 1
                tsv -= wsv
                psv = -wsv
                nac = False
            elif sfw.lower() in ldf[1].word.values: #Check if singular version of word is in dataframe2
                #print(sfw, 2)
                wsv = int(ldf[1].iloc[ldf[1]['word'].loc[lambda x: x==sfw.lower()].index.tolist()[0]].sentiment)
                if(wsv>0):
                    nnw += 1
                elif(wsv<0):
                    npw += 1
                tsv -= wsv
                psv = -wsv
                nac = False
            elif pfw.lower() in ldf[1].word.values: #Check if plural version of word is in dataframe2
                #print(pfw, 2)
                wsv = int(ldf[1].iloc[ldf[1]['word'].loc[lambda x: x==pfw.lower()].index.tolist()[0]].sentiment)
                if(wsv>0):
                    nnw += 1
                elif(wsv<0):
                    npw += 1
                tsv -= wsv
                psv = -wsv
                nac = False
            elif sfw.lower() in ldf[2].word.values: #Check if singular version of word is in dataframe3
                #print(sfw, 3, tsv)
                nnw += 1
                psv = -3
                nac = False
            elif pfw.lower() in ldf[2].word.values: #Check if plural version of word is in dataframe3
                #print(pfw, 3, tsv)
                nnw += 1
                psv = -3
                nac = False
            elif sfw.lower() in ldf[3].word.values: #Check if singular version of word is in dataframe4
                #print(sfw, 4)
                npw += 1
                psv = 3
                nac = False
            elif pfw.lower() in ldf[3].word.values: #Check if plural version of word is in dataframe4
                #print(pfw, 4)
                npw += 1
                psv = 3
                nac = False
            else: #The word must be a "neutral" word
                #print(wrd, sfw, pfw)
                nNw += 1
        #t2 = time.time()
        #print("Amount of time taken to parse word: " + str(t2-t1) + "sec")
    t4 = time.time()
    print("Amount of time taken to parse tweet: " + str(t4-t3) + "sec")
    return(npw, nnw, nNw, tsv)

def NaiveBayes(txt, ppl, tov):
    #tov = likelihoodFunctionInformation(ctt, [df1, df2, df3, df4]) #Obtain tuple of values required to calculate the Likelihood function and posterior probability
    pPp = ppl[0] #Positive class Prior Probability
    pnp = ppl[1] #Negative class Prior Probability
    pNp = ppl[2] #Neutral class Prior Probability
    npw = tov[0] #No. of positive words
    nnw = tov[1] #No. of negative words
    nNw = tov[2] #No. of neutral words
    tsv = tov[3] #Total Sentiment Value
    tnw = npw + nnw + nNw #Total no. of words
    cls = " " #Defining the class which the text belongs to.
    #print(npw, nnw, nNw, tsv)
    if(npw==0 and nnw==0):
        cls = "neutral" #Class is set to Neutral
    else:
        if(tsv==0):
            den = (pPp*(1-np.exp(-1*((npw*5)/(tnw))))) + (pnp*(1-np.exp(-1*((nnw*5)/(tnw))))) + (pNp*(1-np.exp(-1*((nNw)/(tnw))))) #Calculate the denominator for the posterior probabilities
            #Posterior Probability of sentiment of text is positive given the text:
            ppp = (pPp*(1-np.exp(-1*((npw*5)/(tnw)))))/(den)
            #print((1-np.exp(-1*(npw*10))))
            #print(ppp)
            #Posterior Probability of sentiment of text is negative given the text:
            npp = (pnp*(1-np.exp(-1*((nnw*5)/(tnw)))))/(den)
            #print((1-np.exp(-1*(nnw*10))))
            #print(npp)
            #Posterior Probability of sentiment of text is neutral given the text:
            Npp = (pNp*(1-np.exp(-1*((nNw)/(tnw)))))/(den)
            #print((1-np.exp(-1*(nNw*10))))
            #print(Npp)
            #Determine the sentimentality of text:
            if(max([ppp,npp,Npp])==ppp):
                cls = "positive"
            if(max([ppp,npp,Npp])==npp):
                cls = "negative"
            if(max([ppp,npp,Npp])==Npp):
                cls = "neutral"
        elif(tsv>0):
            den = (pPp*(1-np.exp(-1*((npw*5*tsv)/(tnw))))) + (pnp*(1-np.exp(-1*((nnw*5)/(tnw))))) + (pNp*(1-np.exp(-1*((nNw)/(tnw*1.45))))) #Calculate the denominator for the posterior probabilities.
            #Posterior Probability of sentiment of text is positive given the text:
            ppp = (pPp*(1-np.exp(-1*((npw*5*tsv)/(tnw)))))/(den)
            #print((1-np.exp(-1*(npw*10))))
            #print(ppp)
            #Posterior Probability of sentiment of text is negative given the text:
            npp = (pnp*(1-np.exp(-1*((nnw*5)/(tnw)))))/(den)
            #print((1-np.exp(-1*(nnw*10))))
            #print(npp)
            #Posterior Probability of sentiment of text is neutral given the text:
            Npp = (pNp*(1-np.exp(-1*((nNw)/(tnw*1.45)))))/(den)
            #print((1-np.exp(-1*(nNw*10))))
            #print(Npp)
            #Determine the sentimentality of text:
            if(max([ppp,npp,Npp])==ppp):
                cls = "positive"
            if(max([ppp,npp,Npp])==npp):
                cls = "negative"
            if(max([ppp,npp,Npp])==Npp):
                cls = "neutral"
        else:
            den = (pPp*(1-np.exp(-1*((npw*5)/(tnw))))) + (pnp*(1-np.exp(-1*((nnw*5*abs(tsv))/(tnw))))) + (pNp*(1-np.exp(-1*((nNw)/(tnw*1.45))))) #Calculate the denominator for the posterior probabilities.
            #Posterior Probability of sentiment of text is positive given the text:
            ppp = (pPp*(1-np.exp(-1*((npw*5)/(tnw)))))/(den)
            #print((1-np.exp(-1*(npw*10))))
            #print(ppp)
            #Posterior Probability of sentiment of text is negative given the text:
            npp = (pnp*(1-np.exp(-1*((nnw*5*abs(tsv))/(tnw)))))/(den)
            #print((1-np.exp(-1*(nnw*10))))
            #print(npp)
            #Posterior Probability of sentiment of text is neutral given the text:
            Npp = (pNp*(1-np.exp(-1*((nNw)/(tnw*1.45)))))/(den)
            #print((1-np.exp(-1*(nNw*10))))
            #print(Npp)
            #Determine the sentimentality of text:
            if(max([ppp,npp,Npp])==ppp):
                cls = "positive"
            if(max([ppp,npp,Npp])==npp):
                cls = "negative"
            if(max([ppp,npp,Npp])==Npp):
                cls = "neutral"
    return cls

#############Loading the Datasets:####################
pd.set_option("display.max_rows", None, "display.max_columns", None)

#Training Dataset:
dft = pd.read_csv("/root/.encrypted/.pythonSai/kCoreBots/CoreBotEN/MachineLearning/NaiveBayes/datasets/trainingdataset.csv", sep=",", skiprows=[0], header=None, usecols=[0,1], names=["tweet_text","sentiment"])
#Testing Dataset:
dfT = pd.read_csv("/root/.encrypted/.pythonSai/kCoreBots/CoreBotEN/MachineLearning/NaiveBayes/datasets/testingdataset.csv", sep=",", skiprows=[0], header=None, usecols=[0,1], names=["tweet_text","sentiment"])
#Sample Dataset:
dfs = pd.read_csv("/root/.encrypted/.pythonSai/kCoreBots/CoreBotEN/MachineLearning/NaiveBayes/datasets/sampleDataset.csv", sep=",", skiprows=[0], header=None, usecols=[0,1,2], names=["tweetid", "userid", "tweet_text"])
#Main Dataset:
dfn = pd.read_csv("/root/.encrypted/.pythonSai/kCoreBots/CoreBotEN/MachineLearning/NaiveBayes/datasets/CoreBotTweetsCombinedEN.csv", sep=",", skiprows=[0], header=None, usecols=[0,1,2], names=["tweetid","userid", "tweet_text"])
#Sentiment Dataset 1:
df1 = pd.read_csv("/root/.encrypted/.pythonSai/kCoreBots/CoreBotEN/MachineLearning/NaiveBayes/datasets/SentimentDictionary/AFINN-111.txt", sep="\t", header=None, usecols=[0,1], names=["word","sentiment"])
#Sentiment Dataset 2:
df2 = pd.read_csv("/root/.encrypted/.pythonSai/kCoreBots/CoreBotEN/MachineLearning/NaiveBayes/datasets/SentimentDictionary/AFINN-96.txt", sep="\t", header=None, usecols=[0,1], names=["word","sentiment"])
#Sentiment Dataset 3 [Positive Words Only]:
df3 = pd.read_csv("/root/.encrypted/.pythonSai/kCoreBots/CoreBotEN/MachineLearning/NaiveBayes/datasets/SentimentDictionary/Positivewords.txt", sep="\n", header=None, usecols=[0], names=["word"])
#Sentiment Dataset 4 [Negative Words Only]:
df4 = pd.read_csv("/root/.encrypted/.pythonSai/kCoreBots/CoreBotEN/MachineLearning/NaiveBayes/datasets/SentimentDictionary/Negativewords.txt", sep="\n", header=None, usecols=[0], names=["word"])

#Dataset required to classify each tweet and its sentimentality to its corresponding bot:
dfc = pd.DataFrame(columns=["tweetid", "userid", "tweet_candidate_class", "tweet_sentiment_class"])

#############Running the Naive Bayesian Classifier:####################
#Obtain the list of Prior Probabilities obtained from Training Dataset:
tts = dft["sentiment"].count() #Total no. of Training Sentiment values.
tTs = dfT["sentiment"].count() #Total no. of Testing sentiment values.
#Append all the Testing sentiment values to the Training sentiment values to obtain a complete list of sentiments used as prior probabilities for classification of all political tweets in "CoreBotTweetsCombinedEN.csv".
for i in range(tts, tts+tTs):
    dft["sentiment"][i] = dfT["sentiment"][i-tts]

ppl = priorProb(dft.sentiment)

loc = [] #List of classes for each text row in the dataframe.

#Dictionary that stores lists used to calculate demographic statistics below:
pbd = {} #Political Bot Dictionary, i.e. dictionary of all twitter bots that tweeted, replied to, or retweeted political comments that affected the 2016 elections. The key represents the bot's userid. The value is a list of class types it belongs to, e.g. ["Trump", "positive", "ProTrump"].

for index, row in dfn.iterrows():
    #print(CleanUp(expandContractions(row["tweet_text"].replace("’", "'"))))
    ctt = CleanUp(expandContractions(row["tweet_text"].replace("’", "'"))) #Cleaned Tweet
    cot = NaiveBayes(ctt, ppl, likelihoodFunctionInformation(ctt, [df1, df2, df3, df4]))
    #print(cot)
    loc.append(cot)

tnr = 0 #Total No. of right words.
mcp = 0 #Misclassification percentage.
tap = 0 #Total Accuracy percentage.
npt = 0 #No. of positive Trump tweets.
nnt = 0 #No. of negative Trump tweets.
nNt = 0 #No. of neutral Trump tweets.
npc = 0 #No. of positive Clinton tweets.
nnc = 0 #No. of negative Clinton tweets.
nNc = 0 #No. of neutral Clinton tweets.
ngt = 0 #No. of general tweets [i.e. not Trump or Hillary].
tht = False #Is the tweet a Trump or Hillary tweet?
tcc = " " #Setting the tweet candidate class [i.e. Trump, Hillary, Neutral] for the classification below.
tsc = " " #Setting the tweet sentiment class [i.e. Positive, Negative, Neutral] for the classification below.
toc = " " #Setting the tweet overall class [i.e. ProTrump, AntiClinton, etc.] for the classification below.

#t="RT @Trumpocrats: @TallahForTrump @tariqnasheed I'm beside myself by his hate for America and how we have done so much to free an entire rac..."
#print(t)
#print("Actual Sentiment: " + "negative")
#print("Calculated Sentiment: " + str(cot))

for i in range(0,len(loc)): #Recording no. of correct tweets:
    #print(dfn.iloc[i].tweet_text)
    #print("Actual Sentiment: " + dft.iloc[i].sentiment)
    #print("Calculated Sentiment: " + loc[i])
    '''
    if(loc[i].lower()==dft.iloc[i].sentiment.lower()):
        tnr += 1 #Use to calculate accuracy of classifier; Not for running entire algorithm
    '''
    #Classification of Tweets to Trump, Hillary or Neutral:
    if("trump" in dfn.iloc[i].tweet_text.lower() or "donald" in dfn.iloc[i].tweet_text.lower()):
        tht = True
        if(("email" in dfn.iloc[i].tweet_text.lower()) or ("makeamericagreatagain" in dfn.iloc[i].tweet_text.lower()) or ("make america great again" in dfn.iloc[i].tweet_text.lower()) or ("maga" in dfn.iloc[i].tweet_text.lower()) or ("russia" in dfn.iloc[i].tweet_text.lower())):
            npt += 1
            tcc = "Trump"
            tsc = "Positive"
            toc = "ProTrump"
        else:
            if(loc[i]=="positive"):
                npt += 1
                tcc = "Trump"
                tsc = "Positive"
                toc = "ProTrump"
            if(loc[i]=="negative"):
                nnt += 1
                tcc = "Trump"
                tsc = "Negative"
                toc = "AntiTrump"
            if(loc[i]=="neutral"):
                nNt += 1
                tcc = "Trump"
                tsc = "Neutral"
                toc = "Neutral"
    if("clinton" in dfn.iloc[i].tweet_text.lower() or "hillary" in dfn.iloc[i].tweet_text.lower()):
        tht = True
        if(("email" in dfn.iloc[i].tweet_text.lower()) or ("makeamericagreatagain" in dfn.iloc[i].tweet_text.lower()) or ("make america great again" in dfn.iloc[i].tweet_text.lower()) or ("maga" in dfn.iloc[i].tweet_text.lower()) or ("russia" in dfn.iloc[i].tweet_text.lower())):
            nnc += 1
            tcc = "Clinton"
            tsc = "Negative"
            toc = "AntiClinton"
        else:
            if(loc[i]=="positive"):
                npc += 1
                tcc = "Clinton"
                tsc = "Positive"
                toc = "ProClinton"
            if(loc[i]=="negative"):
                tcc = "Clinton"
                tsc = "Negative"
                toc = "AntiClinton"
                nnc += 1
            if(loc[i]=="neutral"):
                tcc = "Clinton"
                tsc = "Neutral"
                toc = "Neutral"
                nNc += 1
    if(tht==False):
        ngt += 1
        tcc = "Neutral"
        tsc = "Neutral"
        toc = "Neutral"
    tht = False

    #############Information required to classify each tweet and its sentimentality to its corresponding bot:#########################
    fsn="/root/.encrypted/.pythonSai/kCoreBots/CoreBotEN/MachineLearning/NaiveBayes/CoreBotsSentiment/Bot-"+dfn.iloc[i].userid+"-EN.csv"

    #Assign values to our political Bot Dictionary defined above:
    tmp = [tcc, tsc, toc] #Temporary List
    if(dfn.iloc[i].userid in pbd.keys()):
        if(tmp not in pbd[dfn.iloc[i].userid]):
            tvl = dfn.iloc[i].userid #temporary value
            pbd[tvl]=pbd[tvl]+[tmp]
    else:
        pbd[dfn.iloc[i].userid] = [tmp]

    #Assign values to temporary dataset that will stream these values into the designated csv file.
    dfc.loc[i] = [dfn.iloc[i].tweetid, dfn.iloc[i].userid, tcc, tsc]
    dfc[["tweetid", "userid","tweet_candidate_class", "tweet_sentiment_class"]].to_csv(fsn, mode='a', sep=',', header=False, index=False)
    #Clear this temporary dataset for it to be usable in the next iteration.
    dfc = dfc.iloc[i:]

#Printing our classification results:
print("******************Trump Sentimentality amongst bots:*******************")
print("Total no. of positive Trump tweets = " + str(npt))
print("Total no. of negative Trump tweets = " + str(nnt))
print("Total no. of neutral Trump tweets = " + str(nNt))
print("Total no. of Trump tweets = "+ str(npt+nnt+nNt))
print("******************Clinton Sentimentality amongst bots:*****************")
print("Total no. of positive Clinton tweets = " + str(npc))
print("Total no. of negative Clinton tweets = " + str(nnc))
print("Total no. of neutral Clinton tweets = " + str(nNc))
print("Total no. of Clinton tweets = "+ str(npc+nnc+nNc))
print("******************General Sentimentality amongst bots:*****************")
print("Total no. of general [not candidate related] tweets = " + str(ngt))
print("*****************General demographics of the bots:*********************")

nmc = 0 #Total No. of bots that represent multiple classes, i.e. have multiple sentiments or are targeting multiple candidates.
npn = 0 #Total No. of bots that are both positive and negative in sentimentality.
ntc = 0 #Total No. of bots that target both Trump and Clinton.
nPtAc = 0 #Total No. of bots that are Pro Trump and Anti Clinton.
nPtAt = 0 #Total No. of bots that are Pro Trump and Anti Trump.
nAtPc = 0 #Total No. of bots that are Anti Trump and Pro Clinton.
nPcAc = 0 #Total No. of bots that are Pro Clinton and Anti Clinton.
nPtPc = 0 #Total No. of bots that are Pro Trump and Pro Clinton.
nAtAc = 0 #Total No. of bots that are Anti Trump and Anti Clinton.

for key, val in pbd.items():
    if(len(val)>1):
        nmc += 1
    if(any("Positive" in all for all in val) and any("Negative" in all for all in val)):
        npn += 1
    if(any("Trump" in all for all in val) and any("Clinton" in all for all in val)):
        ntc += 1
    if(any("ProTrump" in all for all in val) and any("AntiClinton" in all for all in val)):
        nPtAc += 1
    if(any("ProTrump" in all for all in val) and any("AntiTrump" in all for all in val)):
        nPtAt += 1
    if(any("AntiTrump" in all for all in val) and any("ProClinton" in all for all in val)):
        nAtPc += 1
    if(any("ProClinton" in all for all in val) and any("AntiClinton" in all for all in val)):
        nPcAc += 1
    if(any("ProTrump" in all for all in val) and any("ProClinton" in all for all in val)):
        nPtPc += 1
    if(any("AntiTrump" in all for all in val) and any("AntiClinton" in all for all in val)):
        nAtAc += 1

#print(pbd)
print("Total no. of bots that have multiple classes = " +str(nmc))
print("Total no. of bots that are both positive and negative in sentimentality = " +str(npn))
print("Total no. of bots that target both Trump and Hillary = " +str(ntc))
print("Total no. of bots that are both ProTrump and AntiClinton = " +str(nPtAc))
print("Total no. of bots that are both ProTrump and AntiTrump = " +str(nPtAt))
print("Total no. of bots that are both AntiTrump and ProClinton = " +str(nAtPc))
print("Total no. of bots that are both ProClinton and AntiClinton = " +str(nPcAc))
print("Total no. of bots that are both ProTrump and ProClinton = " +str(nPtPc))
print("Total no. of bots that are both AntiTrump and AntiClinton = " +str(nAtAc))

'''
#Accuracy and Misclassification Rate of Classifier:
print("Accuracy Percentage of Classifier: " + str((tnr/len(loc))*100) + "%")
print("Misclassification Percentage of Classifier: " + str((1-(tnr/len(loc)))*100) + "%")
'''
replace
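Read as math, the posterior computed in NaiveBayes is a three-class Bayes rule with an exponentially saturating likelihood. Schematically, with N the total word count, n_c the per-class word count, and w_c a per-class boost (5 for the positive and negative classes, additionally scaled by |tsv| for the class matching the sign of the total sentiment value; the neutral term uses w = 1 and divides N by 1.45 when tsv is nonzero):

$$P(c \mid \text{tweet}) = \frac{P(c)\,\bigl(1 - e^{-w_c n_c / N}\bigr)}{\sum_{k \in \{+,\,-,\,0\}} P(k)\,\bigl(1 - e^{-w_k n_k / N}\bigr)}$$

The class with the largest posterior is returned, and tweets with no sentiment-bearing words short-circuit to "neutral" before any posterior is computed.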
losses.py
import torch
from torch import nn
from torch.nn import functional as F

from torchutils import to_device


class
(nn.Module):
    """weighted version of Focal Loss"""

    def __init__(self, alpha=.25, gamma=2, device=None):
        super(FocalLoss, self).__init__()
        self.alpha = torch.tensor([alpha, 1 - alpha])
        # self.alpha = to_device(self.alpha, device=device)
        self.gamma = gamma

    def forward(self, inputs, targets):
        BCE_loss = F.binary_cross_entropy(inputs, targets.float(), reduction='none')
        targets = targets.long()
        at = self.alpha.to(targets.device).gather(0, targets.view(-1))
        pt = torch.exp(-BCE_loss)
        F_loss = at * (1 - pt) ** self.gamma * BCE_loss
        return F_loss.mean()


def binary_cross_entropy_weighted_focal_loss(y_pred, y_true, alpha=0.25, gamma=6, mask=None):
    return FocalLoss(alpha=alpha, gamma=gamma)(y_pred, y_true)


def cross_entropy_focal_loss(y_pred, y_true, weight=None, alpha=0.25, gamma=6, mask=None):
    # important to add reduction='none' to keep per-batch-item loss
    ce_loss = F.cross_entropy(y_pred, y_true, reduction='none', weight=weight)
    pt = torch.exp(-ce_loss)
    focal_loss = (alpha * (1 - pt) ** gamma * ce_loss).mean()  # mean over the batch
    return focal_loss


def binary_cross_entropy_focal_loss___(y_pred, y_true, alpha=0.25, gamma=6, mask=None):
    # important to add reduction='none' to keep per-batch-item loss
    ce_loss = F.binary_cross_entropy(y_pred, y_true, reduction='none')
    pt = torch.exp(-ce_loss)
    focal_loss = (alpha * (1 - pt) ** gamma * ce_loss).mean()  # mean over the batch
    return focal_loss


def bce_focal_loss(alpha=0.25, gamma=6):
    def fn(y_pred, y_true, mask=None):
        return binary_cross_entropy_focal_loss___(y_pred, y_true, alpha, gamma, mask=mask)

    return fn


def ce_focal_loss(alpha=0.25, gamma=6):
    def fn(y_pred, y_true, mask=None):
        # pass keyword arguments: positionally, alpha would land on the
        # `weight` parameter of cross_entropy_focal_loss
        return cross_entropy_focal_loss(y_pred, y_true, alpha=alpha, gamma=gamma, mask=mask)

    return fn
FocalLoss
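For reference, every variant in losses.py computes the focal loss of Lin et al. (2017). With CE the per-example cross-entropy, the code recovers the model's probability for the true class as p_t = exp(-CE) and applies

$$\mathrm{FL}(p_t) = -\,\alpha_t\,(1 - p_t)^{\gamma}\,\log(p_t)$$

Since CE = -log(p_t), the expression at * (1 - pt) ** gamma * ce_loss is exactly this quantity: gamma down-weights well-classified examples and alpha_t balances the two classes.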
.happydoc.fsa.py
(S'822675c38199b44f85699c1653abb0fc' p1 (ihappydoclib.parseinfo.moduleinfo ModuleInfo p2 (dp3 S'_namespaces' p4 ((dp5 S'FSA' p6 (ihappydoclib.parseinfo.classinfo ClassInfo p7 (dp8 g4 ((dp9 (dp10 S'nextStates' p11 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp12 g4 ((dp13 (dp14 tsS'_exception_info' p15 (dsS'_parameter_names' p16 (S'self' p17 S'state' p18 S'input' p19 tsS'_parameter_info' p20 (dp21 g19 (NNNtsg17 (NNNtsg18 (NNNtssS'_filename' p22 S'fsa.py' p23 sS'_docstring' p24 S'' sS'_name' p25 g11 sS'_parent' p26 g7 sS'_function_info' p27 g14 sS'_configuration_values' p28 (dsS'_class_info' p29 g13 sS'_comment_info' p30 (dp31 (S'FSA' p32 S'labelMatches' tS' \n Accepting\n \n' p33 s(g32 S'sorted' tS' \n Reductions\n \n' p34 s(g32 S'create' tS' \n Copying\n \n' p35 s(g32 S'complement' tS' \n FSA operations\n \n' p36 s(g32 S'hasArcMetadata' tS' \n Arc Metadata Accessors\n \n' p37 s(g32 S'__repr__' tS' \n Presentation Methods\n \n' p38 s(g32 S'makeStateTable' tS' \n Initialization\n \n' p39 s(g32 S'isEmpty' tS' \n Predicates\n \n' p40 s(g32 S'epsilonClosure' tS' \n Accessors\n \n' p41 ssS'_comments' p42 S'' sbsS'makeStateTable' p43 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp44 g4 ((dp45 (dp46 tsg15 (dsg16 (S'self' p47 S'default' p48 tsg20 (dp49 g48 (I1 S'None' Ntsg47 (NNNtssg22 g23 sg24 S'' sg25 g43 sg26 g7 sg27 g46 sg28 (dsg29 g45 sg30 g31 sg42 g39 sbsS'tuple' p50 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp51 g4 ((dp52 (dp53 tsg15 (dsg16 (S'self' p54 tsg20 (dp55 g54 (NNNtssg22 g23 sg24 S'' sg25 g50 sg26 g7 sg27 g53 sg28 (dsg29 g52 sg30 g31 sg42 S'' sbsS'collectStates' p56 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp57 g4 ((dp58 (dp59 tsg15 (dsg16 (S'self' p60 S'transitions' p61 S'initialState' p62 S'finalStates' p63 tsg20 (dp64 g60 (NNNtsg61 (NNNtsg63 (NNNtsg62 (NNNtssg22 g23 sg24 S'' sg25 g56 sg26 g7 sg27 g59 sg28 (dsg29 g58 sg30 g31 sg42 S'' sbsS'complement' p65 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp66 g4 ((dp67 (dp68 tsg15 (dsg16 (S'self' p69 tsg20 (dp70 g69 (NNNtssg22 g23 sg24 S'' sg25 g65 sg26 g7 sg27
sg30 g31 sg42 g36 sbsS'labels' p71 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp72 g4 ((dp73 (dp74 tsg15 (dsg16 (S'self' p75 tsg20 (dp76 g75 (NNNtssg22 g23 sg24 S'Returns a list of transition labels.' sg25 g71 sg26 g7 sg27 g74 sg28 (dsg29 g73 sg30 g31 sg42 S'' sbsS'determinized' p77 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp78 g4 ((dp79 (dp80 tsg15 (dsg16 (S'self' p81 tsg20 (dp82 g81 (NNNtssg22 g23 sg24 S'Returns a deterministic FSA that accepts the same language.' sg25 g77 sg26 g7 sg27 g80 sg28 (dsg29 g79 sg30 g31 sg42 S'' sbsS'minimized' p83 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp84 g4 ((dp85 (dp86 tsg15 (dsg16 (S'self' p87 tsg20 (dp88 g87 (NNNtssg22 g23 sg24 S'Returns a minimal FSA that accepts the same language.' sg25 g83 sg26 g7 sg27 g86 sg28 (dsg29 g85 sg30 g31 sg42 S'' sbsS'initializeTransitionTables' p89 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp90 g4 ((dp91 (dp92 tsg15 (dsg16 (S'self' p93 tsg20 (dp94 g93 (NNNtssg22 g23 sg24 S'' sg25 g89 sg26 g7 sg27 g92 sg28 (dsg29 g91 sg30 g31 sg42 S'' sbsS'coerce' p95 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp96 g4 ((dp97 (dp98 tsg15 (dsg16 (S'self' p99 S'klass' p100 tsg20 (dp101 g99 (NNNtsg100 (NNNtssg22 g23 sg24 S'' sg25 g95 sg26 g7 sg27 g98 sg28 (dsg29 g97 sg30 g31 sg42 S'' sbsS'hasArcMetadata' p102 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp103 g4 ((dp104 (dp105 tsg15 (dsg16 (S'self' p106 tsg20 (dp107 g106 (NNNtssg22 g23 sg24 S'' sg25 g102 sg26 g7 sg27 g105 sg28 (dsg29 g104 sg30 g31 sg42 g37 sbsS'__str__' p108 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp109 g4 ((dp110 (dp111 tsg15 (dsg16 (S'self' p112 tsg20 (dp113 g112 (NNNtssg22 g23 sg24 S'' sg25 g108 sg26 g7 sg27 g111 sg28 (dsg29 g110 sg30 g31 sg42 S'' sbsS'stateLabelString' p114 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp115 g4 ((dp116 (dp117 tsg15 (dsg16 (S'self' p118 S'state' p119 tsg20 (dp120 g118 (NNNtsg119 (NNNtssg22 g23 sg24 S"A template method for specifying a state's label, for use in dot\n diagrams. If this returns None, the default (the string representation\n of the state) is used." 
sg25 g114 sg26 g7 sg27 g117 sg28 (dsg29 g116 sg30 g31 sg42 S'' sbsS'labelMatches' p121 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp122 g4 ((dp123 (dp124 tsg15 (dsg16 (S'self' p125 S'label' p126 S'input' p127 tsg20 (dp128 g127 (NNNtsg125 (NNNtsg126 (NNNtssg22 g23 sg24 S'' sg25 g121 sg26 g7 sg27 g124 sg28 (dsg29 g123 sg30 g31 sg42 g33 sbsS'computeEpsilonClosure' p129 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp130 g4 ((dp131 (dp132 tsg15 (dsg16 (S'self' p133 S'state' p134 tsg20 (dp135 g133 (NNNtsg134 (NNNtssg22 g23 sg24 S'' sg25 g129 sg26 g7 sg27 g132 sg28 (dsg29 g131 sg30 g31 sg42 S'' sbsS'sorted' p136 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp137 g4 ((dp138 (dp139 tsg15 (dsg16 (S'self' p140 S'initial' p141 tsg20 (dp142 g140 (NNNtsg141 (I1 S'0' Ntssg22 g23 sg24 S'' sg25 g136 sg26 g7 sg27 g139 sg28 (dsg29 g138 sg30 g31 sg42 g34 sbsS'copy' p143 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp144 g4 ((dp145 (dp146 tsg15 (dsg16 (S'self' p147 S'*args' p148 tsg20 (dp149 g148 (NNNtsg147 (NNNtssg22 g23 sg24 S'' sg25 g143 sg26 g7 sg27 g146 sg28 (dsg29 g145 sg30 g31 sg42 S'' sbsS'addArcMetadataFor' p150 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp151 g4 ((dp152 (dp153 tsg15 (dsg16 (S'self' p154 S'transition' p155 S'data' p156 tsg20 (dp157 g154 (NNNtsg155 (NNNtsg156 (NNNtssg22 g23 sg24 S'' sg25 g150 sg26 g7 sg27 g153 sg28 (dsg29 g152 sg30 g31 sg42 S'' sbsS'__init__' p158 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp159 g4 ((dp160 (dp161 tsg15 (dsg16 (S'self' p162 S'states' p163 S'alphabet' p164 S'transitions' p165 S'initialState' p166 S'finalStates' p167 S'arcMetadata' p168 tsg20 (dp169 g163 (NNNtsg167 (NNNtsg164 (NNNtsg162 (NNNtsg165 (NNNtsg168 (I1 S'[]' Ntsg166 (NNNtssg22 g23 sg24 S'' sg25 g158 sg26 g7 sg27 g161 sg28 (dsg29 g160 sg30 g31 sg42 S'' sbsS'getArcMetadata' p170 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp171 g4 ((dp172 (dp173 tsg15 (dsg16 (S'self' p174 tsg20 (dp175 g174 (NNNtssg22 g23 sg24 S'' sg25 g170 sg26 g7 sg27 g173 sg28 (dsg29 g172 sg30 g31 sg42 S'' sbsS'setArcMetadataFor' p176 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp177 g4 ((dp178 (dp179 tsg15 (dsg16 (S'self' p180 S'transition' p181 S'data' p182 tsg20 (dp183 g180 (NNNtsg181 (NNNtsg182 (NNNtssg22 g23 sg24 S'' sg25 g176 sg26 g7 sg27 g179 sg28 (dsg29 g178 sg30 g31 sg42 S'' sbsS'withoutEpsilons' p184 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp185 g4 ((dp186 (dp187 tsg15 (dsg16 (S'self' p188 tsg20 (dp189 g188 (NNNtssg22 g23 sg24 S'' sg25 g184 sg26 g7 sg27 g187 sg28 (dsg29 g186 sg30 g31 sg42 S'' sbsS'addArcMetadata' p190 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp191 g4 ((dp192 (dp193 tsg15 (dsg16 (S'self' p194 S'list' p195 tsg20 (dp196 g194 (NNNtsg195 (NNNtssg22 g23 sg24 S'' sg25 g190 sg26 g7 sg27 g193 sg28 (dsg29 g192 sg30 g31 sg42 S'' sbsS'epsilonClosure' p197 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp198 g4 ((dp199 (dp200 tsg15 (dsg16 (S'self' p201 S'state' p202 tsg20 (dp203 g201 (NNNtsg202 (NNNtssg22 g23 sg24 S'' sg25 g197 sg26 g7 sg27 g200 sg28 (dsg29 g199 sg30 g31 sg42 g41 sbsS'additionalTransitionInfoString' p204 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp205 g4 ((dp206 (dp207 tsg15 (dsg16 (S'self' p208 S'transition' p209 tsg20 (dp210 g208 (NNNtsg209 (NNNtssg22 g23 sg24 S'' sg25 g204 sg26 g7 sg27 g207 sg28 (dsg29 g206 sg30 g31 sg42 S'' sbsS'create' p211 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp212 g4 ((dp213 (dp214 tsg15 (dsg16 (S'self' p215 S'*args' p216 tsg20 (dp217 g216 (NNNtsg215 (NNNtssg22 g23 sg24 
S'' sg25 g211 sg26 g7 sg27 g214 sg28 (dsg29 g213 sg30 g31 sg42 g35 sbsS'isEmpty' p218 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp219 g4 ((dp220 (dp221 tsg15 (dsg16 (S'self' p222 tsg20 (dp223 g222 (NNNtssg22 g23 sg24 S'' sg25 g218 sg26 g7 sg27 g221 sg28 (dsg29 g220 sg30 g31 sg42 g40 sbsS'accepts' p224 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp225 g4 ((dp226 (dp227 tsg15 (dsg16 (S'self' p228 S'sequence' p229 tsg20 (dp230 g228 (NNNtsg229 (NNNtssg22 g23 sg24 S'' sg25 g224 sg26 g7 sg27 g227 sg28 (dsg29 g226 sg30 g31 sg42 S'' sbsS'getArcMetadataFor' p231 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp232 g4 ((dp233 (dp234 tsg15 (dsg16 (S'self' p235 S'transition' p236 S'default' p237 tsg20 (dp238 g237 (I1 S'None' Ntsg235 (NNNtsg236 (NNNtssg22 g23 sg24 S'' sg25 g231 sg26 g7 sg27 g234 sg28 (dsg29 g233 sg30 g31 sg42 S'' sbsS'nextState' p239 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp240 g4 ((dp241 (dp242 tsg15 (dsg16 (S'self' p243 S'state' p244 S'input' p245 tsg20 (dp246 g245 (NNNtsg243 (NNNtsg244 (NNNtssg22 g23 sg24 S'' sg25 g239 sg26 g7 sg27 g242 sg28 (dsg29 g241 sg30 g31 sg42 S'' sbsS'trimmed' p247 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp248 g4 ((dp249 (dp250 tsg15 (dsg16 (S'self' p251 tsg20 (dp252 g251 (NNNtssg22 g23 sg24 S"Returns an equivalent FSA that doesn't include unreachable states,\n or states that only lead to dead states." sg25 g247 sg26 g7 sg27 g250 sg28 (dsg29 g249 sg30 g31 sg42 S'' sbsS'isFSA' p253 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp254 g4 ((dp255 (dp256 tsg15 (dsg16 (S'self' p257 tsg20 (dp258 g257 (NNNtssg22 g23 sg24 S'' sg25 g253 sg26 g7 sg27 g256 sg28 (dsg29 g255 sg30 g31 sg42 S'' sbsS'creationArgs' p259 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp260 g4 ((dp261 (dp262 tsg15 (dsg16 (S'self' p263 tsg20 (dp264 g263 (NNNtssg22 g23 sg24 S'' sg25 g259 sg26 g7 sg27 g262 sg28 (dsg29 g261 sg30 g31 sg42 S'' sbsS'__repr__' p265 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp266 g4 ((dp267 (dp268 tsg15 (dsg16 (S'self' p269 tsg20 (dp270 g269 (NNNtssg22 g23 sg24 S'' sg25 g265 sg26 g7 sg27 g268 sg28 (dsg29 g267 sg30 g31 sg42 g38 sbsS'setArcMetadata' p271 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp272 g4 ((dp273 (dp274 tsg15 (dsg16 (S'self' p275 S'list' p276 tsg20 (dp277 g275 (NNNtsg276 (NNNtssg22 g23 sg24 S'' sg25 g271 sg26 g7 sg27 g274 sg28 (dsg29 g273 sg30 g31 sg42 S'' sbsS'nextAvailableState' p278 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp279 g4 ((dp280 (dp281 tsg15 (dsg16 (S'self' p282 tsg20 (dp283 g282 (NNNtssg22 g23 sg24 S'' sg25 g278 sg26 g7 sg27 g281 sg28 (dsg29 g280 sg30 g31 sg42 S'' sbsS'computeEpsilonClosures' p284 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp285 g4 ((dp286 (dp287 tsg15 (dsg16 (S'self' p288 tsg20 (dp289 g288 (NNNtssg22 g23 sg24 S'' sg25 g284 sg26 g7 sg27 g287 sg28 (dsg29 g286 sg30 g31 sg42 S'' sbsS'view' p290 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp291 g4 ((dp292 (dp293 tsg15 (dsg16 (S'self' p294 tsg20 (dp295 g294 (NNNtssg22 g23 sg24 S'' sg25 g290 sg26 g7 sg27 g293 sg28 (dsg29 g292 sg30 g31 sg42 S'' sbsS'nextStateSet' p296 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp297 g4 ((dp298 (dp299 tsg15 (dsg16 (S'self' p300 S'states' p301 S'input' p302 tsg20 (dp303 g301 (NNNtsg302 (NNNtsg300 (NNNtssg22 g23 sg24 S'' sg25 g296 sg26 g7 sg27 g299 sg28 (dsg29 g298 sg30 g31 sg42 S'' sbsS'toDotString' p304 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp305 g4 ((dp306 (dp307 tsg15 (dsg16 (S'self' p308 tsg20 (dp309 g308 (NNNtssg22 g23 sg24 
S'Returns a string that can be printed by the DOT tool at\n http://www.research.att.com/sw/tools/graphviz/ .' sg25 g304 sg26 g7 sg27 g307 sg28 (dsg29 g306 sg30 g31 sg42 S'' sbsS'transitionsFrom' p310 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp311 g4 ((dp312 (dp313 tsg15 (dsg16 (S'self' p314 S'state' p315 tsg20 (dp316 g314 (NNNtsg315 (NNNtssg22 g23 sg24 S'' sg25 g310 sg26 g7 sg27 g313 sg28 (dsg29 g312 sg30 g31 sg42 S'' sbstsg22 g23 sg24 S'' sS'_class_member_info' p317 (lsg25 g6 sg26 g2 sg27 g10 sg42 S'' sg28 (dsg29 g9 sg30 g31 sS'_base_class_info' p318 (lsbs(dp319 S'trim' p320 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp321 g4 ((dp322 (dp323 tsg15 (dsg16 (S'fsa' p324 tsg20 (dp325 g324 (NNNtssg22 g23 sg24 S'' sg25 g320 sg26 g2 sg27 g323 sg28 (dsg29 g322 sg30 g31 sg42 S'' sbsS'completion' p326 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp327 g4 ((dp328 (dp329 tsg15 (dsg16 (S'fsa' p330 tsg20 (dp331 g330 (NNNtssg22 g23 sg24 S'Returns an FSA that accepts the same language as the argument, but that\n lands in a defined state for every input.' sg25 g326 sg26 g2 sg27 g329 sg28 (dsg29 g328 sg30 g31 sg42 S'' sbsS'singleton' p332 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp333 g4 ((dp334 (dp335 tsg15 (dsg16 (S'symbol' p336 S'alphabet' p337 S'arcMetadata' p338 tsg20 (dp339 g337 (I1 S'None' Ntsg336 (NNNtsg338 (I1 S'None' Ntssg22 g23 sg24 S'' sg25 g332 sg26 g2 sg27 g335 sg28 (dsg29 g334 sg30 g31 sg42 S'' sbsS'option' p340 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp341 g4 ((dp342 (dp343 tsg15 (dsg16 (S'fsa' p344 tsg20 (dp345 g344 (NNNtssg22 g23 sg24 S'' sg25 g340 sg26 g2 sg27 g343 sg28 (dsg29 g342 sg30 g31 sg42 S'' sbsS'sequence' p346 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp347 g4 ((dp348 (dp349 tsg15 (dsg16 (S'sequence' p350 S'alphabet' p351 tsg20 (dp352 g351 (I1 S'None' Ntsg350 (NNNtssg22 g23 sg24 S'' sg25 g346 sg26 g2 sg27 g349 sg28 (dsg29 g348 sg30 g31 sg42 S'' sbsS'equivalent' p353 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp354 g4 ((dp355 (dp356 tsg15 (dsg16 (S'a' S'b' tsg20 (dp357 S'a' (NNNtsS'b' (NNNtssg22 g23 sg24 S'Return true ifff a and b accept the same language.' sg25 g353 sg26 g2 sg27 g356 sg28 (dsg29 g355 sg30 g31 sg42 S'' sbsS'unionLabelSets' p358 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp359 g4 ((dp360 (dp361 tsg15 (dsg16 (S'alist' p362 S'blist' p363 S'alphabet' p364 tsg20 (dp365 g364 (I1 S'None' Ntsg363 (NNNtsg362 (NNNtssg22 g23 sg24 S'' sg25 g358 sg26 g2 sg27 g361 sg28 (dsg29 g360 sg30 g31 sg42 S'' sbsS'symbolComplement' p366 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp367 g4 ((dp368 (dp369 tsg15 (dsg16 (S'symbol' p370 tsg20 (dp371 g370 (NNNtssg22 g23 sg24 S'' sg25 g366 sg26 g2 sg27 g369 sg28 (dsg29 g368 sg30 g31 sg42 S'' sbsS'concatenation' p372 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp373 g4 ((dp374 (dp375 tsg15 (dsg16 (S'a' S'*args' p376 tsg20 (dp377 S'a' (NNNtsg376 (NNNtssg22 g23 sg24 S'Returns an FSA that accepts the language consisting of the concatenation\n of strings recognized by the arguments.' 
sg25 g372 sg26 g2 sg27 g375 sg28 (dsg29 g374 sg30 g31 sg42 S'' sbsS'sort' p378 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp379 g4 ((dp380 (dp381 tsg15 (dsg16 (S'fsa' p382 tsg20 (dp383 g382 (NNNtssg22 g23 sg24 S'' sg25 g378 sg26 g2 sg27 g381 sg28 (dsg29 g380 sg30 g31 sg42 S'' sbsS'labelIntersection' p384 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp385 g4 ((dp386 (dp387 tsg15 (dsg16 (S'l1' p388 S'l2' p389 tsg20 (dp390 g389 (NNNtsg388 (NNNtssg22 g23 sg24 S'' sg25 g384 sg26 g2 sg27 g387 sg28 (dsg29 g386 sg30 g31 sg42 S'' sbsS'intersectLabelSets' p391 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp392 g4 ((dp393 (dp394 tsg15 (dsg16 (S'alist' p395 S'blist' p396 tsg20 (dp397 g396 (NNNtsg395 (NNNtssg22 g23 sg24 S'' sg25 g391 sg26 g2 sg27 g394 sg28 (dsg29 g393 sg30 g31 sg42 S'' sbsS'labelString' p398 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp399 g4 ((dp400 (dp401 tsg15 (dsg16 (S'label' p402 tsg20 (dp403 g402 (NNNtssg22 g23 sg24 S'' sg25 g398 sg26 g2 sg27 g401 sg28 (dsg29 g400 sg30 g31 sg42 S'' sbsS'compileItem' p404 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp405 g4 ((dp406 (dp407 tsg15 (dp408 S"'unimplemented'" Nssg16 (S'str' p409 S'index' p410 S'options' p411 tsg20 (dp412 g410 (NNNtsg411 (NNNtsg409 (NNNtssg22 g23 sg24 S'' sg25 g404 sg26 g2 sg27 g407 sg28 (dsg29 g406 sg30 g31 sg42 S'' sbsS'labelComplement' p413 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp414 g4 ((dp415 (dp416 tsg15 (dsg16 (S'label' p417 S'alphabet' p418 tsg20 (dp419 g418 (NNNtsg417 (NNNtssg22 g23 sg24 S'' sg25 g413 sg26 g2 sg27 g416 sg28 (dsg29 g415 sg30 g31 sg42 S'' sbsS'removeDuplicates' p420 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp421 g4 ((dp422 (dp423 tsg15 (dsg16 (S'sequence' p424 tsg20 (dp425 g424 (NNNtssg22 g23 sg24 S'' sg25 g420 sg26 g2 sg27 g423 sg28 (dsg29 g422 sg30 g31 sg42 S'' sbsS'difference' p426 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp427 g4 ((dp428 (dp429 tsg15 (dsg16 (S'a' S'b' tsg20 (dp430 S'a' (NNNtsS'b' (NNNtssg22 g23 sg24 S'Returns an FSA that accepts those strings accepted by the first\n argument, but not the second.' 
sg25 g426 sg26 g2 sg27 g429 sg28 (dsg29 g428 sg30 g31 sg42 S'' sbsS'compileREExpr' p431 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp432 g4 ((dp433 (dp434 tsg15 (dsg16 (S'str' p435 S'index' p436 S'options' p437 tsg20 (dp438 g436 (NNNtsg437 (NNNtsg435 (NNNtssg22 g23 sg24 S'' sg25 g431 sg26 g2 sg27 g434 sg28 (dsg29 g433 sg30 g31 sg42 S'' sbsS'complementLabelSet' p439 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp440 g4 ((dp441 (dp442 tsg15 (dsg16 (S'labels' p443 S'alphabet' p444 tsg20 (dp445 g444 (I1 S'None' Ntsg443 (NNNtssg22 g23 sg24 S'' sg25 g439 sg26 g2 sg27 g442 sg28 (dsg29 g441 sg30 g31 sg42 S'' sbsS'compileRE' p446 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp447 g4 ((dp448 (dp449 tsg15 (dp450 S"'extra ' + ` ')' `" Nssg16 (S's' S'**options' p451 tsg20 (dp452 S's' (NNNtsg451 (NNNtssg22 g23 sg24 S'' sg25 g446 sg26 g2 sg27 g449 sg28 (dsg29 g448 sg30 g31 sg42 S'' sbsS'closure' p453 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp454 g4 ((dp455 (dp456 tsg15 (dsg16 (S'arg' p457 tsg20 (dp458 g457 (NNNtssg22 g23 sg24 S'' sg25 g453 sg26 g2 sg27 g456 sg28 (dsg29 g455 sg30 g31 sg42 S'' sbsS'labelComplements' p459 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp460 g4 ((dp461 (dp462 tsg15 (dsg16 (S'label' p463 S'alphabet' p464 tsg20 (dp465 g464 (NNNtsg463 (NNNtssg22 g23 sg24 S'' sg25 g459 sg26 g2 sg27 g462 sg28 (dsg29 g461 sg30 g31 sg42 S'' sbsS'intersection' p466 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp467 g4 ((dp468 (dp469 tsg15 (dsg16 (S'a' S'b' tsg20 (dp470 S'a' (NNNtsS'b' (NNNtssg22 g23 sg24 S'Returns the intersection of two FSAs' sg25 g466 sg26 g2 sg27 g469 sg28 (dsg29 g468 sg30 g31 sg42 S'' sbsS'reverse' p471 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp472 g4 ((dp473 (dp474 tsg15 (dsg16 (S'fsa' p475 tsg20 (dp476 g475 (NNNtssg22 g23 sg24 S'' sg25 g471 sg26 g2 sg27 g474 sg28 (dsg29 g473 sg30 g31 sg42 S'' sbsS'determinize' p477 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp478 g4 ((dp479 (dp480 tsg15 (dsg16 (S'fsa' p481 tsg20 (dp482 g481 (NNNtssg22 g23 sg24 S'' sg25 g477 sg26 g2 sg27 g480 sg28 (dsg29 g479 sg30 g31 sg42 S'' sbsS'union' p483 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp484 g4 ((dp485 (dp486 tsg15 (dsg16 (S'*args' p487 tsg20 (dp488 g487 (NNNtssg22 g23 sg24 S'' sg25 g483 sg26 g2 sg27 g486 sg28 (dsg29 g485 sg30 g31 sg42 S'' sbsS'symbolIntersection' p489 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp490 g4 ((dp491 (dp492 tsg15 (dsg16 (S's1' p493 S's2' p494 tsg20 (dp495 g494 (NNNtsg493 (NNNtssg22 g23 sg24 S'' sg25 g489 sg26 g2 sg27 g492 sg28 (dsg29 g491 sg30 g31 sg42 S'' sbsS'compileSequence' p496 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp497 g4 ((dp498 (dp499 tsg15 (dsg16 (S'str' p500 S'index' p501 S'options' p502 tsg20 (dp503 g501 (NNNtsg502 (NNNtsg500 (NNNtssg22 g23 sg24 S'' sg25 g496 sg26 g2 sg27 g499 sg28 (dsg29 g498 sg30 g31 sg42 S'' sbsS'iteration' p504 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp505 g4 ((dp506 (dp507 tsg15 (dsg16 (S'fsa' p508 S'min' p509 S'max' p510 tsg20 (dp511 g508 (NNNtsg510 (I1 S'None' Ntsg509 (I1 S'1' Ntssg22 g23 sg24 S"\n >>> equivalent(iteration(singleton('a', 0, 2)), compileRE('|a|aa'))\n >>> equivalent(iteration(singleton('a', 1, 2)), compileRE('a|aa'))\n >>> equivalent(iteration(singleton('a', 1)), compileRE('aa*'))\n " sg25 g504 sg26 g2 sg27 g507 sg28 (dsg29 g506 sg30 g31 sg42 S'' sbsS'constructLabelMap' p512 (ihappydoclib.parseinfo.functioninfo FunctionInfo (dp513 g4 ((dp514 (dp515 tsg15 (dsg16 (S'labels' p516 S'alphabet' p517 S'includeComplements' 
[Pickled `.happydoc` parse-info cache for the Python module `fsa`; only its embedded documentation is recoverable, and it is summarized here. The cache lists the functions toFSA(arg), compileConjunction(str, index, options), minimize(fsa), consolidateTransitions(transitions), containment(arg, occurrences=1), labelMatches(label, input), _labelIntersection(l1, l2), complement(arg), and view(str), plus named imports of InstanceType, ListType, IntType, and LongType from the `types` module. Surviving function docstrings: one helper (name truncated above this section) returns "a list of (newLabel, positives), where newLabel is an intersection of elements from labels and their complements, and positives is a list of labels that have non-empty intersections with newLabel"; containment "returns an FSA that matches sequences containing at least _count_ occurrences of _symbol_"; complement "returns an FSA that accepts exactly those strings that the argument does not."

The embedded module docstring documents an FSA class for representing and operating on finite-state automata, deterministic or nondeterministic, with optional epsilon transitions. FSAs are built from labels with singleton(), sequence(), or compileRE() (a small regular-expression compiler whose only metacharacters are '*', '+', '?', '|', '(', and ')') and combined with complement(), closure(), concatenation(), union(), intersection(), difference(), iteration(), option(), reverse(), and containment(). Reductions include determinized(), minimized(), sorted(), and trimmed(), with function forms determinize, minimize, sort, and trim; acceptance is tested with accepts(), nextState(), nextStates(), and nextStateSet(); toDotString() and view() render the automaton through AT&T GraphViz. Labels may be alphabet symbols or predicate objects implementing matches(), complement(), and intersection(); plain strings also work, with '~a' denoting the complement of 'a' and '~a&~b' the intersection of '~a' and '~b'. The constants EMPTY_STRING_FSA, NULL_FSA, and UNIVERSAL_FSA accept, respectively, only the empty string, the null language, and S* for any alphabet S. An FSA is constructed from (states, alphabet, transitions, initialState, finalStates), where each transition is a (startState, endState, label) triple, and fsa.tuple() returns those values in that order. The stated design goals are ease of use, readability, and extensibility, explicitly not efficiency.]
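A short usage sketch of the API the recovered documentation describes. It assumes the module imports as `fsa` (an assumption; the cache does not record the import path), and the regex and its accepted strings come from the docstring's own compileRE example.

import fsa  # the module documented above; import name is an assumption

# compileRE's only metacharacters are '*', '+', '?', '|', '(' and ')'.
machine = fsa.compileRE('a(b|c*)')
for s in ['a', 'ab', 'ac', 'acc', 'accc']:
    assert machine.accepts(s)  # accepted, per the docstring's example

# The same language, built from the primitive constructors the docstring lists:
same = fsa.concatenation(
    fsa.singleton('a'),
    fsa.union(fsa.singleton('b'), fsa.closure(fsa.singleton('c'))),
)
assert fsa.equivalent(machine, same)  # true iff both accept the same language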
mfcc.py
from python_speech_features import mfcc
import scipy.io.wavfile as wav
import matplotlib.pyplot as plt
from matplotlib import cm
import numpy as np
import os
import random


def load_noise(path='dat/_background_noise_/'):
    noise = []
    files = os.listdir(path)
    for f in files:
        filename = f
        if 'wav' not in filename:
            continue
        f = os.path.join(path, f)
        (rate, sig) = wav.read(f)
        noise.append(sig)
    return noise


def generate_mfcc(sig, rate, sig_len, noise=None, noise_weight=0.1, winlen=0.03125,
                  winstep=0.03125 / 2, numcep=13, nfilt=26, nfft=512, lowfreq=20,
                  highfreq=4000, winfunc=np.hanning, ceplifter=0, preemph=0.97):
    # pad or truncate so every clip is exactly sig_len samples
    if len(sig) != sig_len:
        if len(sig) < sig_len:
            sig = np.pad(sig, (0, sig_len - len(sig)), 'constant')
        if len(sig) > sig_len:
            sig = sig[0:sig_len]
    # scale int16 samples to [-1, 1), the 'tensorflow'-style normalization
    sig = sig.astype('float') / 32768
    if noise is not None:
        noise = noise[random.randint(0, len(noise) - 1)]  # pick a noise clip
        start = random.randint(0, len(noise) - sig_len)   # pick a random window in it
        noise = noise[start:start + sig_len]
        noise = noise.astype('float') / 32768
        sig = sig * (1 - noise_weight) + noise * noise_weight
    # wav.write('noise_test.wav', rate, sig)
    mfcc_feat = mfcc(sig, rate, winlen=winlen, winstep=winstep, numcep=numcep,
                     nfilt=nfilt, nfft=nfft, lowfreq=lowfreq, highfreq=highfreq,
                     winfunc=winfunc, ceplifter=ceplifter, preemph=preemph)
    mfcc_feat = mfcc_feat.astype('float32')
    return mfcc_feat


def merge_mfcc_file(input_path='dat/', mix_noise=True, sig_len=16000, winlen=0.03125,
                    winstep=0.03125 / 2, numcep=13, nfilt=26, nfft=512, lowfreq=20,
                    highfreq=4000, winfunc=np.hanning, ceplifter=0, preemph=0.97):
    train_data = []
    test_data = []
    validate_data = []
    train_label = []
    test_label = []
    validate_label = []
    if mix_noise:
        noise = load_noise()
    else:
        noise = None
    # the dataset ships official split lists; membership below is a substring check
    with open(input_path + 'testing_list.txt', 'r') as f:
        test_list = f.read()
    with open(input_path + 'validation_list.txt', 'r') as f:
        validate_list = f.read()
    files = os.listdir(input_path)
    for fi in files:
        fi_d = os.path.join(input_path, fi)  # folders of each cmd
        if os.path.isdir(fi_d):
            label = fi_d.split('/')[1]  # get the label from the dir
            print(label)
            # background noise goes into the training set, sliced into sig_len chunks
            if 'noise' in label:
                for f in os.listdir(fi_d):
                    filename = f
                    if 'wav' not in filename:
                        continue
                    f = os.path.join(fi_d, f)
                    (rate, sig) = wav.read(f)
                    for i in range(0, len(sig), sig_len):
                        data = generate_mfcc(sig[i:i + sig_len], rate, sig_len,
                                             winlen=winlen, winstep=winstep,
                                             numcep=numcep, nfilt=nfilt, nfft=nfft,
                                             lowfreq=lowfreq, highfreq=highfreq,
                                             winfunc=winfunc, ceplifter=ceplifter,
                                             preemph=preemph)
                        data = np.array(data)  # copy into an independent ndarray
                        train_data.append(data)
                        train_label.append('noise')
                continue
            # dataset
            for f in os.listdir(fi_d):
                filename = f
                f = os.path.join(fi_d, f)
                (rate, sig) = wav.read(f)
                data = generate_mfcc(sig, rate, sig_len, noise=noise,
                                     winlen=winlen, winstep=winstep, numcep=numcep,
                                     nfilt=nfilt, nfft=nfft, lowfreq=lowfreq,
                                     highfreq=highfreq, winfunc=winfunc,
                                     ceplifter=ceplifter, preemph=preemph)
                data = np.array(data)  # copy into an independent ndarray
                # split dataset into train, test, validate
                if filename in test_list:
                    test_data.append(data)
                    test_label.append(label)
                elif filename in validate_list:
                    validate_data.append(data)
                    validate_label.append(label)
                else:
                    train_data.append(data)
                    train_label.append(label)
    # finalize
    test_data = np.array(test_data)
    validate_data = np.array(validate_data)
    return (train_data, train_label), (test_data, test_label), (validate_data, validate_label)


if __name__ == "__main__":
    # test
    (x_train, y_train), (x_test, y_test), (x_val, y_val) = merge_mfcc_file()
    np.save('train_data.npy', x_train)
    np.save('train_label.npy', y_train)
    np.save('test_data.npy', x_test)
    np.save('test_label.npy', y_test)
    np.save('val_data.npy', x_val)
    np.save('val_label.npy', y_val)
    print('x_train shape:', x_train.shape, 'max', x_train.max(), 'min', x_train.min())
    mfcc_feat = x_train[3948]
    mfcc_feat = np.swapaxes(mfcc_feat, 0, 1)
    fig, ax = plt.subplots()
    cax = ax.imshow(mfcc_feat, interpolation='nearest', origin='lower', aspect='auto')
    ax.set_title('MFCC')
    plt.show()
train_data = np.array(train_data)
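The record's missing middle, on the line above, converts train_data to an ndarray so all six arrays can be saved. Below is a minimal consumer sketch for the .npy files the __main__ block writes; the integer label mapping and the 16 kHz frame-count arithmetic are illustrative assumptions, not part of the original script.

import numpy as np

# Load what the script above saved; the label files hold strings, not ints.
x_train = np.load('train_data.npy')
y_train = np.load('train_label.npy')

# Map string labels ('noise', command words, ...) to integer class ids
# (an assumed scheme; the script itself never does this).
classes = sorted(set(y_train.tolist()))
label_to_id = {name: i for i, name in enumerate(classes)}
y_ids = np.array([label_to_id[name] for name in y_train])

# Assuming 16 kHz clips: 16000 samples with a 0.03125 s window and 50% hop
# yield 63 frames of 13 cepstra, so x_train has shape (n_clips, 63, 13).
print(x_train.shape, y_ids.shape, len(classes))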
yyp_boss.rs
use super::{
    directory_manager::DirectoryManager, errors::*, folders::*, pipelines::PipelineManager,
    utils, YyResource, YyResourceData, YyResourceHandler, YypSerialization,
};
use crate::{FileSerializationError, ProjectMetadata, Resource};
use anyhow::{Context, Result as AnyResult};
use object_yy::Object;
use shader::Shader;
use std::{fs, path::Path};
use yy_typings::{
    script::Script,
    sounds::Sound,
    sprite_yy::*,
    utils::{ResourceNameValidator, TrailingCommaUtility},
    AnimationCurve, Font, Path as YyPath, Yyp,
};

static TCU: once_cell::sync::Lazy<TrailingCommaUtility> =
    once_cell::sync::Lazy::new(TrailingCommaUtility::new);
static RNV: once_cell::sync::Lazy<ResourceNameValidator> =
    once_cell::sync::Lazy::new(ResourceNameValidator::new);

#[derive(Debug, PartialEq, Default)]
pub struct YypBoss {
    pub directory_manager: DirectoryManager,
    pub pipeline_manager: PipelineManager,
    pub sprites: YyResourceHandler<Sprite>,
    pub scripts: YyResourceHandler<Script>,
    pub objects: YyResourceHandler<Object>,
    pub shaders: YyResourceHandler<Shader>,
    pub notes: YyResourceHandler<Note>,
    pub sounds: YyResourceHandler<Sound>,
    pub animation_curves: YyResourceHandler<AnimationCurve>,
    pub extensions: YyResourceHandler<Extension>,
    pub fonts: YyResourceHandler<Font>,
    pub paths: YyResourceHandler<YyPath>,
    pub rooms: YyResourceHandler<Room>,
    pub sequences: YyResourceHandler<Sequence>,
    pub tilesets: YyResourceHandler<TileSet>,
    pub timelines: YyResourceHandler<Timeline>,
    pub vfs: Vfs,
    yyp: Yyp,
}

impl YypBoss {
    /// Creates a new YyBoss Manager and performs startup file reading.
    pub fn new<P: AsRef<Path>>(path_to_yyp: P) -> Result<YypBoss, StartupError> {
        Self::with_startup_injest(path_to_yyp, &[])
    }

    pub fn with_startup_injest<P: AsRef<Path>>(
        path_to_yyp: P,
        resources_to_scan: &[Resource],
    ) -> Result<YypBoss, StartupError> {
        let yyp: Yyp = utils::deserialize_json_tc(&path_to_yyp, &TCU).map_err(|e| match e {
            FileSerializationError::Serde(e) => StartupError::BadYypDeserialize(e),
            FileSerializationError::Io(error) => StartupError::BadYypPath {
                yyp_filepath: path_to_yyp.as_ref().to_owned(),
                error,
            },
        })?;

        if yyp.meta_data.ide_version != Yyp::DEFAULT_VERSION {
            return Err(StartupError::YypIsWrongVersion(
                Yyp::DEFAULT_VERSION.to_string(),
                yyp.meta_data.ide_version,
            ));
        }

        let directory_manager = DirectoryManager::new(path_to_yyp.as_ref())?;

        let mut yyp_boss = Self {
            vfs: Vfs::new(&yyp.name),
            pipeline_manager: PipelineManager::new(&directory_manager),
            directory_manager,
            yyp,
            ..Self::default()
        };

        // Load in Folders
        yyp_boss.vfs.load_in_folders(&yyp_boss.yyp.folders);

        // load in all of our resources...
        for yyp_resource in yyp_boss.yyp.resources.clone().into_iter() {
            let path_as_str = yyp_resource.id.path.to_string_lossy();
            let subpath = path_as_str
                .split('/')
                .next()
                .ok_or_else(|| StartupError::BadResourceListing(yyp_resource.id.path.clone()))?;
            let resource = Resource::parse_subpath(subpath)
                .ok_or_else(|| StartupError::BadResourceListing(yyp_resource.id.path.clone()))?;

            let assoc = resources_to_scan.contains(&resource);

            match resource {
                Resource::Sprite => load_in_file::<Sprite>(yyp_resource, &mut yyp_boss, assoc),
                Resource::Script => load_in_file::<Script>(yyp_resource, &mut yyp_boss, assoc),
                Resource::Object => load_in_file::<Object>(yyp_resource, &mut yyp_boss, assoc),
                Resource::Note => load_in_file::<Note>(yyp_resource, &mut yyp_boss, assoc),
                Resource::Shader => load_in_file::<Shader>(yyp_resource, &mut yyp_boss, assoc),
                Resource::AnimationCurve => {
                    load_in_file::<AnimationCurve>(yyp_resource, &mut yyp_boss, assoc)
                }
                Resource::Room => load_in_file::<Room>(yyp_resource, &mut yyp_boss, assoc),
                Resource::Extension => {
                    load_in_file::<Extension>(yyp_resource, &mut yyp_boss, assoc)
                }
                Resource::Font => load_in_file::<Font>(yyp_resource, &mut yyp_boss, assoc),
                Resource::Path => load_in_file::<YyPath>(yyp_resource, &mut yyp_boss, assoc),
                Resource::Sequence => load_in_file::<Sequence>(yyp_resource, &mut yyp_boss, assoc),
                Resource::Sound => load_in_file::<Sound>(yyp_resource, &mut yyp_boss, assoc),
                Resource::TileSet => load_in_file::<TileSet>(yyp_resource, &mut yyp_boss, assoc),
                Resource::Timeline => load_in_file::<Timeline>(yyp_resource, &mut yyp_boss, assoc),
            }?;
        }

        return Ok(yyp_boss);

        fn load_in_file<T: YyResource>(
            yyp_resource: YypResource,
            yyp_boss: &mut YypBoss,
            load_in_associated_data: bool,
        ) -> Result<(), StartupError> {
            let yy_file_path = yyp_boss
                .directory_manager
                .root_directory()
                .join(&yyp_resource.id.path);

            let yy_file: T = utils::deserialize_json_tc(&yy_file_path, &TCU).map_err(|e| {
                StartupError::BadYyFile {
                    filepath: yy_file_path,
                    error: e.to_string(),
                }
            })?;

            yyp_boss
                .vfs
                .load_in_file(&yy_file, yyp_resource.order)
                .map_err(|e| StartupError::BadResourceTree {
                    name: yy_file.name().to_owned(),
                    error: e.to_string(),
                })?;

            let name = yy_file.name().to_owned();
            let root_path = yyp_boss.directory_manager.root_directory().to_owned();
            let handler = T::get_handler_mut(yyp_boss);
            handler.load_on_startup(yy_file);

            if load_in_associated_data {
                handler.load_resource_associated_data(&name, &root_path, &TCU)?;
            }

            Ok(())
        }
    }

    /// Gets the default texture path, if it exists. The "Default" group simply
    /// has the name `"Default"`.
    ///
    /// This method will almost certainly be refactored soon to a dedicated TextureManager.
    pub fn default_texture_path(&self) -> Option<TexturePath> {
        self.yyp
            .texture_groups
            .iter()
            .find(|tex| tex.name == "Default")
            .map(|texture_group| texture_group.into())
    }

    /// Serializes the YypBoss data to disk at the path of the Yyp.
    pub fn serialize(&mut self) -> AnyResult<()> {
        // serialize the vfs
        self.vfs
            .serialize(&mut self.yyp.folders, &mut self.yyp.resources);

        // serialize all the tracked components
        self.sprites.serialize(&self.directory_manager)?;
        self.objects.serialize(&self.directory_manager)?;
        self.scripts.serialize(&self.directory_manager)?;
        self.notes.serialize(&self.directory_manager)?;
        self.shaders.serialize(&self.directory_manager)?;

        // THESE DO NOT HAVE EXCELLENT TYPINGS YET.
        self.animation_curves.serialize(&self.directory_manager)?;
        self.extensions.serialize(&self.directory_manager)?;
        self.fonts.serialize(&self.directory_manager)?;
        self.paths.serialize(&self.directory_manager)?;
        self.rooms.serialize(&self.directory_manager)?;
        self.sequences.serialize(&self.directory_manager)?;
        self.sounds.serialize(&self.directory_manager)?;
        self.tilesets.serialize(&self.directory_manager)?;
        self.timelines.serialize(&self.directory_manager)?;

        // serialize the pipeline manifests
        self.pipeline_manager
            .serialize(&self.directory_manager)
            .context("serializing pipelines")?;

        // Serialize Ourselves:
        let string = self.yyp.yyp_serialization(0);
        fs::write(&self.directory_manager.yyp(), &string)?;

        Ok(())
    }

    pub fn version_string(&self) -> &str {
        &self.yyp.meta_data.ide_version
    }

    pub fn project_metadata(&self) -> ProjectMetadata {
        ProjectMetadata {
            name: self.yyp.name.clone(),
            ide_version: self.yyp.meta_data.ide_version.clone(),
            yyp_version: self.yyp.resource_version,
            root_file: ViewPath {
                name: self.yyp.name.clone(),
                path: self.vfs.root_file_viewpath(),
            },
        }
    }

    pub fn tcu(&self) -> &TrailingCommaUtility {
        &TCU
    }

    pub fn yyp(&self) -> &Yyp {
        &self.yyp
    }
}

// for generics
impl YypBoss {
    /// Adds a new resource, which must not already exist within the project.
    pub fn add_resource<T: YyResource>(
        &mut self,
        yy_file: T,
        associated_data: T::AssociatedData,
    ) -> Result<(), ResourceManipulationError> {
        self.can_use_name(yy_file.name())?;

        if T::RESOURCE.can_manipulate() == false {
            return Err(ResourceManipulationError::ResourceCannotBeManipulated);
        }

        self.vfs.new_resource_end(&yy_file)?;
        let handler = T::get_handler_mut(self);
        if handler.set(yy_file, associated_data).is_some() {
            Err(ResourceManipulationError::InternalError)
        } else {
            Ok(())
        }
    }

    /// Removes a resource, which must already exist within the project.
    pub fn remove_resource<T: YyResource>(
        &mut self,
        name: &str,
    ) -> Result<(T, Option<T::AssociatedData>), ResourceManipulationError> {
        // remove the file from the VFS...
        self.vfs.remove_resource(name, T::RESOURCE)?;

        let path = self.directory_manager.root_directory().to_path_buf();
        let handler = T::get_handler_mut(self);
        handler
            .remove(name, &path, &TCU)
            .ok_or(ResourceManipulationError::InternalError)
    }

    /// Renames a resource, which must already exist within the project.
    pub fn rename_resource<T: YyResource>(
        &mut self,
        name: &str,
        new_name: String,
    ) -> Result<(), ResourceManipulationError> {
        // we cannot rename resources we cannot reserialize...
        if T::RESOURCE.can_manipulate() == false {
            return Err(ResourceManipulationError::ResourceCannotBeManipulated);
        }

        // check to make sure the new name isn't taken...
        if let Some(value) = self.vfs.resource_names.get(&new_name) {
            return Err(ResourceManipulationError::NameCollision(value.resource));
        }

        // check to make sure we're not dealing with some COMEDIANS
        if name == new_name {
            return Ok(());
        }

        // rename the file in the VFS...
        self.vfs
            .rename_resource(name, T::RESOURCE, new_name.clone())?;

        let path = self.directory_manager.root_directory().to_path_buf();
        let handler = T::get_handler_mut(self);
        handler
            .rename(name, new_name, &path, &TCU)
            .map_err(|_| ResourceManipulationError::InternalError)?;

        Ok(())
    }

    pub fn can_use_name(&self, name: &str) -> Result<(), ResourceManipulationError> {
        if let Some(r) = self.vfs.resource_names.get(name) {
            return Err(ResourceManipulationError::NameCollision(r.resource));
        }

        if RNV.is_valid(name) == false {
            return Err(ResourceManipulationError::BadName);
        }

        Ok(())
    }

    /// Move a resource within the Asset Tree
    pub fn move_resource<T: YyResource>(
        &mut self,
        name: &str,
        new_parent: ViewPath,
    ) -> Result<(), ResourceManipulationError> {
        // cannot move them because we cannot reserialize them
        if T::RESOURCE.can_manipulate() == false {
            return Err(ResourceManipulationError::ResourceCannotBeManipulated);
        }

        // vfs
        self.vfs
            .move_resource(name, T::RESOURCE, &new_parent.path)
            .map_err(ResourceManipulationError::FolderGraphError)?;

        let handler = T::get_handler_mut(self);
        handler
            .edit_parent(name, new_parent)
            .map_err(|_| ResourceManipulationError::InternalError)?;

        Ok(())
    }

    /// Gets a resource via the type. Users should probably not use this method unless they're doing
    /// some generic code. Instead, simply use each resource's manager as appropriate -- for example,
    /// to get an object's data, use `yyp_boss.objects.get`.
    ///
    /// *Nb*: `YyResourceData` might not have any AssociatedData on it. See its warning on how Associated
    /// Data is held lazily.
    pub fn
<T: YyResource>(&self, name: &str) -> Option<&YyResourceData<T>> {
        let handler = T::get_handler(self);
        handler.get(name)
    }

    /// Ensures some associated data is loaded by generic type. If you aren't working generically, just access
    /// the individual handlers for this.
    ///
    /// If `force` is passed in, then this will *always* reload the associated data. Be careful out there -- hot
    /// reloading isn't a feature we really support yet.
    ///
    /// This operation will return a reference to the associated data if we succeeded.
    pub fn ensure_associated_data_is_loaded<T: YyResource>(
        &mut self,
        name: &str,
        force: bool,
    ) -> Result<(), YyResourceHandlerError> {
        let path = self.directory_manager.root_directory().to_path_buf();
        let handler = T::get_handler_mut(self);

        // reload if the data was never loaded, or if the caller forces it
        let reload = handler
            .get(name)
            .map(|data| data.associated_data.is_none() || force)
            .unwrap_or(true);

        if reload {
            handler.load_resource_associated_data(name, &path, &TCU)?;
        }

        Ok(())
    }
}

// resource handling!
impl YypBoss {
    /// Move a resource within the Asset Tree, using the passed in resource type
    pub fn move_resource_dynamic(
        &mut self,
        name: &str,
        new_parent: ViewPath,
        resource: Resource,
    ) -> Result<(), ResourceManipulationError> {
        match resource {
            Resource::Sprite => self.move_resource::<Sprite>(name, new_parent),
            Resource::Script => self.move_resource::<Script>(name, new_parent),
            Resource::Object => self.move_resource::<Object>(name, new_parent),
            Resource::Note => self.move_resource::<Note>(name, new_parent),
            Resource::Shader => self.move_resource::<Shader>(name, new_parent),
            _ => Err(ResourceManipulationError::ResourceCannotBeManipulated),
        }
    }

    /// Removes a folder RECURSIVELY. **All resources within will be removed**. Be careful out there.
    pub fn remove_folder(
        &mut self,
        folder: &ViewPathLocation,
    ) -> Result<(), ResourceManipulationError> {
        // easy: an empty folder can simply be dropped
        if self.vfs.remove_empty_folder(folder).is_ok() {
            return Ok(());
        }

        // okay okay, more complex operation
        let deleted_resources = self.vfs.remove_non_empty_folder(folder)?;

        for (fsys, descriptor) in deleted_resources {
            match descriptor.resource {
                Resource::Sprite => {
                    self.sprites
                        .remove(&fsys.name, self.directory_manager.root_directory(), &TCU);
                }
                Resource::Script => {
                    self.scripts
                        .remove(&fsys.name, self.directory_manager.root_directory(), &TCU);
                }
                Resource::Object => {
                    self.objects
                        .remove(&fsys.name, self.directory_manager.root_directory(), &TCU);
                }
                Resource::Note => {
                    self.notes
                        .remove(&fsys.name, self.directory_manager.root_directory(), &TCU);
                }
                Resource::Shader => {
                    self.shaders
                        .remove(&fsys.name, self.directory_manager.root_directory(), &TCU);
                }
                Resource::AnimationCurve => {
                    self.animation_curves.remove(
                        &fsys.name,
                        self.directory_manager.root_directory(),
                        &TCU,
                    );
                }
                Resource::Extension => {
                    self.extensions.remove(
                        &fsys.name,
                        self.directory_manager.root_directory(),
                        &TCU,
                    );
                }
                Resource::Font => {
                    self.fonts
                        .remove(&fsys.name, self.directory_manager.root_directory(), &TCU);
                }
                Resource::Path => {
                    self.paths
                        .remove(&fsys.name, self.directory_manager.root_directory(), &TCU);
                }
                Resource::Room => {
                    self.rooms
                        .remove(&fsys.name, self.directory_manager.root_directory(), &TCU);
                }
                Resource::Sequence => {
                    self.sequences.remove(
                        &fsys.name,
                        self.directory_manager.root_directory(),
                        &TCU,
                    );
                }
                Resource::Sound => {
                    self.sounds
                        .remove(&fsys.name, self.directory_manager.root_directory(), &TCU);
                }
                Resource::TileSet => {
                    self.tilesets
                        .remove(&fsys.name, self.directory_manager.root_directory(), &TCU);
                }
                Resource::Timeline => {
                    self.timelines.remove(
                        &fsys.name,
                        self.directory_manager.root_directory(),
                        &TCU,
                    );
                }
            }
        }

        Ok(())
    }
}
get_resource
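This record's middle is the single identifier get_resource: it completes the `pub fn` left dangling at the end of the prefix, yielding the generic getter `pub fn get_resource<T: YyResource>(&self, name: &str) -> Option<&YyResourceData<T>>`. Below is a toy sketch of how the three spans splice back together; the variable names are illustrative only, and the exact whitespace at the seams is not preserved by this dump.

# Reassembling a record: middle goes between prefix and suffix.
prefix_tail = "pub fn "
middle = "get_resource"
suffix_head = "<T: YyResource>(&self, name: &str) -> Option<&YyResourceData<T>> {"

# Prints the reconstructed Rust signature line.
print(prefix_tail + middle + suffix_head)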
bitcoin_ca.ts
<?xml version="1.0" ?><!DOCTYPE TS><TS language="ca" version="2.0"> <defaultcodec>UTF-8</defaultcodec> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About Bitcoin</source> <translation>Quant a ClockWorkCoin</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;Bitcoin&lt;/b&gt; version</source> <translation type="unfinished"/> </message> <message> <location line="+57"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation type="unfinished"/> </message> <message> <location filename="../aboutdialog.cpp" line="+14"/> <source>Copyright</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The Bitcoin developers</source> <translation type="unfinished"/> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>Llibreta d&apos;adreces</translation> </message> <message> <location line="+19"/> <source>Double-click to edit address or label</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Copia la selecció actual al porta-retalls del sistema</translation> </message> <message> <location line="-11"/> <source>&amp;New Address</source> <translation type="unfinished"/> </message> <message> <location filename="../addressbookpage.cpp" line="+63"/> <source>These are your Bitcoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>These are your ClockWorkCoin addresses for receiving payments.
You may want to give a different one to each sender so you can keep track of who is paying you.</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>&amp;Copy Address</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a Bitcoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Export the data in the current tab to a file</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="-44"/> <source>Verify a message to ensure it was signed with a specified Bitcoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation type="unfinished"/> </message> <message> <location filename="../addressbookpage.cpp" line="-5"/> <source>These are your Bitcoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source> <translation>These are your ClockWorkCoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</translation> </message> <message> <location line="+13"/> <source>Copy &amp;Label</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>&amp;Edit</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Send &amp;Coins</source> <translation type="unfinished"/> </message> <message> <location line="+260"/> <source>Export Address Book Data</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation type="unfinished"/> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Address</source> <translation type="unfinished"/> </message> <message> <location line="+36"/> <source>(no label)</source> <translation type="unfinished"/> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation type="unfinished"/> </message> <message> <location 
filename="../askpassphrasedialog.cpp" line="+33"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation type="unfinished"/> </message> <message> <location line="+46"/> <source>Confirm wallet encryption</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR BITCOINS&lt;/b&gt;!</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation type="unfinished"/> </message> <message> <location line="+100"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation type="unfinished"/> </message> <message> <location line="-130"/> <location line="+58"/> <source>Wallet encrypted</source> <translation type="unfinished"/> </message> <message> <location line="-56"/> <source>Bitcoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your bitcoins from being stolen by malware infecting your computer.</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+42"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation type="unfinished"/> </message> <message> <location line="-54"/> <source>Wallet encryption failed due to an internal error. 
Your wallet was not encrypted.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <location line="+48"/> <source>The supplied passphrases do not match.</source> <translation type="unfinished"/> </message> <message> <location line="-37"/> <source>Wallet unlock failed</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <location line="+11"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation type="unfinished"/> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation type="unfinished"/> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+233"/> <source>Sign &amp;message...</source> <translation type="unfinished"/> </message> <message> <location line="+280"/> <source>Synchronizing with network...</source> <translation type="unfinished"/> </message> <message> <location line="-349"/> <source>&amp;Overview</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation type="unfinished"/> </message> <message> <location line="+20"/> <source>&amp;Transactions</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Edit the list of stored addresses and labels</source> <translation type="unfinished"/> </message> <message> <location line="-14"/> <source>Show the list of addresses for receiving payments</source> <translation type="unfinished"/> </message> <message> <location line="+31"/> <source>E&amp;xit</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Quit application</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Show information about Bitcoin</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>&amp;Encrypt Wallet...</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation type="unfinished"/> </message> <message> <location line="+285"/> <source>Importing blocks from disk...</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Reindexing blocks on disk...</source> <translation type="unfinished"/> </message> <message> <location line="-347"/> <source>Send coins to a Bitcoin address</source> <translation type="unfinished"/> </message> <message> <location line="+49"/> <source>Modify configuration options for Bitcoin</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>Backup wallet to another location</source> <translation type="unfinished"/> </message> <message> 
<location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>&amp;Debug window</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation type="unfinished"/> </message> <message> <location line="-4"/> <source>&amp;Verify message...</source> <translation type="unfinished"/> </message> <message> <location line="-165"/> <location line="+530"/> <source>Bitcoin</source> <translation type="unfinished"/> </message> <message> <location line="-530"/> <source>Wallet</source> <translation type="unfinished"/> </message> <message> <location line="+101"/> <source>&amp;Send</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Receive</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>&amp;Addresses</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>&amp;About Bitcoin</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Show or hide the main Window</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Encrypt the private keys that belong to your wallet</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Sign messages with your Bitcoin addresses to prove you own them</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Verify messages to ensure they were signed with specified Bitcoin addresses</source> <translation type="unfinished"/> </message> <message> <location line="+28"/> <source>&amp;File</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Settings</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>&amp;Help</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>Tabs toolbar</source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <location line="+10"/> <source>[testnet]</source> <translation type="unfinished"/> </message> <message> <location line="+47"/> <source>Bitcoin client</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+141"/> <source>%n active connection(s) to Bitcoin network</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+22"/> <source>No block source available...</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Processed %1 of %2 (estimated) blocks of transaction history.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Processed %1 blocks of transaction history.</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+20"/> <source>%n hour(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location 
line="+4"/> <source>%n week(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+4"/> <source>%1 behind</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Last received block was generated %1 ago.</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>Error</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Information</source> <translation type="unfinished"/> </message> <message> <location line="+70"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source> <translation type="unfinished"/> </message> <message> <location line="-140"/> <source>Up to date</source> <translation type="unfinished"/> </message> <message> <location line="+31"/> <source>Catching up...</source> <translation type="unfinished"/> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Sent transaction</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Incoming transaction</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation type="unfinished"/> </message> <message> <location line="+33"/> <location line="+23"/> <source>URI handling</source> <translation type="unfinished"/> </message> <message> <location line="-23"/> <location line="+23"/> <source>URI can not be parsed! This can be caused by an invalid Bitcoin address or malformed URI parameters.</source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation type="unfinished"/> </message> <message> <location filename="../bitcoin.cpp" line="+111"/> <source>A fatal error occurred. Bitcoin can no longer continue safely and will quit.</source> <translation type="unfinished"/> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+104"/> <source>Network Alert</source> <translation type="unfinished"/> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. 
This can only be modified for sending addresses.</source> <translation type="unfinished"/> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>New sending address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation type="unfinished"/> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation type="unfinished"/> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid Bitcoin address.</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation type="unfinished"/> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+424"/> <location line="+12"/> <source>Bitcoin-Qt</source> <translation type="unfinished"/> </message> <message> <location line="-12"/> <source>version</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Usage:</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>command-line options</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>UI options</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation type="unfinished"/> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation type="unfinished"/> </message> <message> <location line="+31"/> <source>Automatically start Bitcoin after logging in to the system.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Start Bitcoin on system login</source> <translation type="unfinished"/> </message> <message> <location line="+35"/> <source>Reset all client options to default.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Reset Options</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>&amp;Network</source> <translation type="unfinished"/> </message> <message> <location line="+6"/>
</message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Connect to the Bitcoin network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 5)</source> <translation type="unfinished"/> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>The user interface language can be set here. 
This setting will take effect after restarting Bitcoin.</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>Whether to show Bitcoin addresses in the transaction list or not.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation type="unfinished"/> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation type="unfinished"/> </message> <message> <location filename="../optionsdialog.cpp" line="+53"/> <source>default</source> <translation type="unfinished"/> </message> <message> <location line="+130"/> <source>Confirm options reset</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Some settings may require a client restart to take effect.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Do you want to proceed?</source> <translation type="unfinished"/> </message> <message> <location line="+42"/> <location line="+9"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting Bitcoin.</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation type="unfinished"/> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation type="unfinished"/> </message> <message> <location line="+50"/> <location line="+166"/> <source>The displayed information may be out of date. 
Your wallet automatically synchronizes with the Bitcoin network after a connection is established, but this process has not completed yet.</source> <translation type="unfinished"/> </message> <message> <location line="-124"/> <source>Balance:</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation type="unfinished"/> </message> <message> <location line="-78"/> <source>Wallet</source> <translation type="unfinished"/> </message> <message> <location line="+107"/> <source>Immature:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation type="unfinished"/> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation type="unfinished"/> </message> <message> <location line="-101"/> <source>Your current balance</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation type="unfinished"/> </message> <message> <location filename="../overviewpage.cpp" line="+116"/> <location line="+1"/> <source>out of sync</source> <translation type="unfinished"/> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start bitcoin: click-to-pay handler</source> <translation>Cannot start ClockWorkCoin: click-to-pay handler</translation> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation type="unfinished"/> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation type="unfinished"/> </message> <message> <location line="+56"/> <source>Amount:</source> <translation type="unfinished"/> </message> <message> <location line="-44"/> <source>Label:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Message:</source> <translation type="unfinished"/> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation type="unfinished"/> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation type="unfinished"/> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation type="unfinished"/> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+339"/> <source>N/A</source> <translation type="unfinished"/> </message> <message> <location 
line="-217"/> <source>Client version</source> <translation type="unfinished"/> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation type="unfinished"/> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation type="unfinished"/> </message> <message> <location line="+49"/> <source>Startup time</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>Network</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>On testnet</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Block chain</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Last block time</source> <translation type="unfinished"/> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Show the Bitcoin-Qt help message to get a list with possible Bitcoin command-line options.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation type="unfinished"/> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation type="unfinished"/> </message> <message> <location line="-260"/> <source>Build date</source> <translation type="unfinished"/> </message> <message> <location line="-104"/> <source>Bitcoin - Debug window</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>Bitcoin Core</source> <translation type="unfinished"/> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Open the Bitcoin debug log file from the current data directory. 
This can take a few seconds for large log files.</source> <translation type="unfinished"/> </message> <message> <location line="+102"/> <source>Clear console</source> <translation type="unfinished"/> </message> <message> <location filename="../rpcconsole.cpp" line="-30"/> <source>Welcome to the Bitcoin RPC console.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation type="unfinished"/> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+124"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation type="unfinished"/> </message> <message> <location line="+50"/> <source>Send to multiple recipients at once</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation type="unfinished"/> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>Balance:</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>123.456 CWC</source> <translation type="unfinished"/> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation type="unfinished"/> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-59"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source> and </source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>The recipient address is not valid, please recheck.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. 
This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation type="unfinished"/> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation type="unfinished"/> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation type="unfinished"/> </message> <message> <location line="+60"/> <location filename="../sendcoinsentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation type="unfinished"/> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation type="unfinished"/> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation type="unfinished"/> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a Bitcoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation type="unfinished"/> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>&amp;Sign Message</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation type="unfinished"/> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. 
1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <location line="+213"/> <source>Choose an address from the address book</source> <translation type="unfinished"/> </message> <message> <location line="-203"/> <location line="+213"/> <source>Alt+A</source> <translation type="unfinished"/> </message> <message> <location line="-203"/> <source>Paste address from clipboard</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Signature</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Copy the current signature to the system clipboard</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this Bitcoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Reset all sign message fields</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation type="unfinished"/> </message> <message> <location line="-87"/> <source>&amp;Verify Message</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation type="unfinished"/> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified Bitcoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Verify &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Reset all verify message fields</source> <translation type="unfinished"/> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a Bitcoin address (e.g. 
1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation type="unfinished"/> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Enter Bitcoin signature</source> <translation type="unfinished"/> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation type="unfinished"/> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation type="unfinished"/> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation type="unfinished"/> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation type="unfinished"/> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation type="unfinished"/> </message> </context> <context> <name>SplashScreen</name> <message> <location filename="../splashscreen.cpp" line="+22"/> <source>The Bitcoin developers</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>[testnet]</source> <translation type="unfinished"/> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+20"/> <source>Open until %1</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>%1/offline</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation type="unfinished"/> </message> <message> <location line="+18"/> <source>Status</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Source</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Generated</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation type="unfinished"/> 
</message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation type="unfinished"/> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation type="unfinished"/> </message> <message> <location line="-2"/> <source>label</source> <translation type="unfinished"/> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation type="unfinished"/> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation type="unfinished"/> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Net amount</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Message</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Comment</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. 
This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Debug information</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Transaction</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Inputs</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Amount</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>true</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>false</source> <translation type="unfinished"/> </message> <message> <location line="-209"/> <source>, has not been successfully broadcast yet</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="-35"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+70"/> <source>unknown</source> <translation type="unfinished"/> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation type="unfinished"/> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+225"/> <source>Date</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Type</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Address</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Amount</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+57"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+3"/> <source>Open until %1</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Offline (%1 confirmations)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Unconfirmed (%1 of %2 confirmations)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Confirmed (%1 confirmations)</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+8"/> <source>Mined balance will be available when it matures in %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+5"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation type="unfinished"/> </message> <message> <location line="+43"/> <source>Received with</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Received from</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Sent to</source> 
<translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Mined</source> <translation type="unfinished"/> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation type="unfinished"/> </message> <message> <location line="+199"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation type="unfinished"/> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+52"/> <location line="+16"/> <source>All</source> <translation type="unfinished"/> </message> <message> <location line="-15"/> <source>Today</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>This week</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>This month</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Last month</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>This year</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Range...</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Received with</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Sent to</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>To yourself</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Mined</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Other</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Min amount</source> <translation type="unfinished"/> </message> <message> <location line="+34"/> <source>Copy address</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy label</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Edit label</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation type="unfinished"/> </message> <message> <location line="+139"/> <source>Export Transaction Data</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> 
<source>Confirmed</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Date</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Type</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Label</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Address</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Amount</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>ID</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation type="unfinished"/> </message> <message> <location line="+100"/> <source>Range:</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>to</source> <translation type="unfinished"/> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+193"/> <source>Send Coins</source> <translation type="unfinished"/> </message> </context> <context> <name>WalletView</name> <message> <location filename="../walletview.cpp" line="+42"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Export the data in the current tab to a file</source> <translation type="unfinished"/> </message> <message> <location line="+193"/> <source>Backup Wallet</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Backup Successful</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The wallet data was successfully saved to the new location.</source> <translation type="unfinished"/> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+94"/> <source>Bitcoin version</source> <translation type="unfinished"/> </message> <message> <location line="+102"/> <source>Usage:</source> <translation type="unfinished"/> </message> <message> <location line="-29"/> <source>Send command to -server or bitcoind</source> <translation type="unfinished"/> </message> <message> <location line="-23"/> <source>List commands</source> <translation type="unfinished"/> </message> <message> <location line="-12"/> <source>Get help for a command</source> <translation type="unfinished"/> </message> <message> <location line="+24"/> <source>Options:</source> <translation type="unfinished"/> </message> <message> <location line="+24"/> <source>Specify configuration file (default: bitcoin.conf)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Specify pid file (default: bitcoind.pid)</source> <translation type="unfinished"/> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation type="unfinished"/> </message> <message> <location line="-9"/> <source>Set database cache 
size in megabytes (default: 25)</source> <translation type="unfinished"/> </message> <message> <location line="-28"/> <source>Listen for connections on &lt;port&gt; (default: 11025 or testnet: 5744)</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation type="unfinished"/> </message> <message> <location line="-48"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation type="unfinished"/> </message> <message> <location line="+82"/> <source>Specify your own public address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation type="unfinished"/> </message> <message> <location line="-134"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation type="unfinished"/> </message> <message> <location line="-29"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 21025 or testnet: 5745)</source> <translation type="unfinished"/> </message> <message> <location line="+37"/> <source>Accept command line and JSON-RPC commands</source> <translation type="unfinished"/> </message> <message> <location line="+76"/> <source>Run in the background as a daemon and accept commands</source> <translation type="unfinished"/> </message> <message> <location line="+37"/> <source>Use the test network</source> <translation type="unfinished"/> </message> <message> <location line="-112"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation type="unfinished"/> </message> <message> <location line="-80"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=bitcoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;Bitcoin Alert&quot; [email protected] </source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. Bitcoin is probably already running.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Error: The transaction was rejected! 
This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong Bitcoin will not work properly.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: wallet.dat corrupt, data salvaged! 
Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Block creation options:</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Connect only to the specified node(s)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Corrupted block database detected</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Do you want to rebuild the block database now?</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error initializing block database</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error initializing wallet database environment %s!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error loading block database</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error opening block database</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error: Disk space is low!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error: Wallet locked, unable to create transaction!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error: system error: </source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to listen on any port. 
Use -listen=0 if you want this.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to read block info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to read block</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to sync block index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write file info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write to coin database</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write transaction index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write undo data</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Find peers using DNS lookup (default: 1 unless -connect)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Generate coins (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 288, 0 = all)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-4, default: 3)</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Not enough file descriptors available.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Rebuild block chain index from current blk000??.dat files</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation type="unfinished"/> </message> <message> <location line="+26"/> <source>Verifying blocks...</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Verifying wallet...</source> <translation type="unfinished"/> </message> <message> <location line="-69"/> <source>Imports blocks from external blk000??.dat file</source> <translation type="unfinished"/> </message> <message> <location line="-76"/> <source>Set the number of script verification threads (up to 16, 0 = auto, &lt;0 = leave that many cores free, default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+77"/> <source>Information</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Invalid amount for -mintxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Maintain a full transaction index (default: 0)</source> <translation type="unfinished"/> 
</message> <message> <location line="+2"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Only accept block chain matching built-in checkpoints (default: 1)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Prepend debug output with timestamp</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Signing transaction failed</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>System error: </source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Transaction amount too small</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction amounts must be positive</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction too large</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Username for JSON-RPC connections</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Warning</source> <translation type="unfinished"/> </message> 
<message> <location line="+1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>You need to rebuild the databases using -reindex to change -txindex</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>wallet.dat corrupt, salvage failed</source> <translation type="unfinished"/> </message> <message> <location line="-50"/> <source>Password for JSON-RPC connections</source> <translation type="unfinished"/> </message> <message> <location line="-67"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation type="unfinished"/> </message> <message> <location line="+76"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation type="unfinished"/> </message> <message> <location line="-120"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation type="unfinished"/> </message> <message> <location line="+147"/> <source>Upgrade wallet to latest format</source> <translation type="unfinished"/> </message> <message> <location line="-21"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation type="unfinished"/> </message> <message> <location line="-12"/> <source>Rescan the block chain for missing wallet transactions</source> <translation type="unfinished"/> </message> <message> <location line="+35"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation type="unfinished"/> </message> <message> <location line="-26"/> <source>Server certificate file (default: server.cert)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation type="unfinished"/> </message> <message> <location line="-151"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation type="unfinished"/> </message> <message> <location line="+165"/> <source>This help message</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation type="unfinished"/> </message> <message> <location line="-91"/> <source>Connect through socks proxy</source> <translation type="unfinished"/> </message> <message> <location line="-10"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation type="unfinished"/> </message> <message> <location line="+55"/> <source>Loading addresses...</source> <translation type="unfinished"/> </message> <message> <location line="-35"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error loading wallet.dat: Wallet requires newer version of Bitcoin</source> <translation type="unfinished"/> </message> <message> <location line="+93"/> <source>Wallet needed to be rewritten: restart Bitcoin to complete</source> <translation type="unfinished"/> </message> <message> <location line="-95"/> <source>Error loading wallet.dat</source> <translation type="unfinished"/> </message> <message> <location line="+28"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+56"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation type="unfinished"/> 
</message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation type="unfinished"/> </message> <message> <location line="-96"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+44"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Invalid amount</source> <translation type="unfinished"/> </message> <message> <location line="-6"/> <source>Insufficient funds</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Loading block index...</source> <translation type="unfinished"/> </message> <message> <location line="-57"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation type="unfinished"/> </message> <message> <location line="-25"/> <source>Unable to bind to %s on this computer. Bitcoin is probably already running.</source> <translation type="unfinished"/> </message> <message> <location line="+64"/> <source>Fee per KB to add to transactions you send</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Loading wallet...</source> <translation type="unfinished"/> </message> <message> <location line="-52"/> <source>Cannot downgrade wallet</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Cannot write default address</source> <translation type="unfinished"/> </message> <message> <location line="+64"/> <source>Rescanning...</source> <translation type="unfinished"/> </message> <message> <location line="-57"/> <source>Done loading</source> <translation type="unfinished"/> </message> <message> <location line="+82"/> <source>To use the %s option</source> <translation type="unfinished"/> </message> <message> <location line="-74"/> <source>Error</source> <translation type="unfinished"/> </message> <message> <location line="-31"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation type="unfinished"/> </message> </context> </TS>
<source>Automatically open the Bitcoin client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation type="unfinished"/>
41-default-metadata-object.spec.js
import testingDB from 'api/utils/testing_db'; import migration from '../index.js'; import fixtures from './fixtures.js'; describe('migration default-metadata-object', () => { beforeEach(async () => { spyOn(process.stdout, 'write'); await testingDB.clearAllAndLoad(fixtures); });
it('should have a delta number', () => { expect(migration.delta).toBe(41); }); it('should add an empty object to all entities without metadata', async () => { await migration.up(testingDB.mongodb); const entities = await testingDB.mongodb .collection('entities') .find({}) .sort({ title: 1 }) .toArray(); expect(entities[0].metadata).toEqual({}); expect(entities[1].metadata).toEqual({}); expect(entities[2].metadata).toEqual({}); expect(entities[3].metadata).toEqual({ some_prop: [{ value: 42 }] }); }); });
afterAll(async () => { await testingDB.disconnect(); });
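// A minimal sketch (an assumption, not the actual migration source) of the
// up() these specs exercise, backfilling a default metadata object while
// leaving entities that already have metadata untouched:
//
//   export default {
//     delta: 41,
//     name: 'default-metadata-object',
//     async up(db) {
//       await db
//         .collection('entities')
//         .updateMany({ metadata: { $exists: false } }, { $set: { metadata: {} } });
//     },
//   };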
tasks_messageboard.go
package marketplace import ( "github.com/jasonlvhit/gocron" ) func StartMessageboardCron()
{ gocron.Every(5).Minutes().Do(RefreshViewThreadsMaterializedView) }
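// Note (assumption): Every(...).Do(...) only registers the job with gocron's
// default scheduler; the process still has to start that scheduler somewhere,
// e.g.:
//
//   func main() {
//       StartMessageboardCron()
//       <-gocron.Start() // blocks while the scheduler fires due jobs
//   }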
ModelManager.js
/** * This class has been deprecated. Use `Ext.data.schema.Schema` instead. */ Ext.define('Ext.data.ModelManager', { alternateClassName: 'Ext.ModelMgr', requires: [ 'Ext.data.schema.Schema' ], singleton: true, deprecated: { 5: { methods: { clear: null, create: function (data, name, id) { var T = name; if (!T.isEntity) { T = this.getModel(name || data.name); } return T.createWithId(id, data); }, each: function(fn, scope) { Ext.data.Model.schema.eachEntity(fn, scope); }, get: function(name) { return this.getModel(name); }, getCount: function() { return Ext.data.Model.schema.entityCount; }, /** * @method getModel * Returns the {@link Ext.data.Model} class for a given model name * @param {String/Object} id The classname of the model or the model class itself. * @return {Ext.data.Model} a model class. * @deprecated 5.0 Use {@link Ext.data.schema.Schema#lookupEntity} instead. */ getModel: function (id) {
return !!this.getModel(name); } } } } });
return Ext.data.schema.Schema.lookupEntity(id); }, isRegistered: function(name) {
test.rs
use keep_awake::inhibit; use std::time::Duration; fn main() -> Result<(), Box<dyn std::error::Error>> { println!("Here!"); let _obj = inhibit("Plumo Snark setup", "very important")?; // drop(_obj); would release the inhibition early, since it is held until the guard is dropped std::thread::sleep(Duration::from_millis(2000 * 1000)); Ok(())
}
unique-operation-name.spec.ts
import { GraphQLRuleTester, ParserOptions } from '../src'; import rule from '../src/rules/unique-operation-name'; import { join } from 'path'; const TEST_OPERATION = `query test { foo }`; const SIBLING_OPERATIONS = (...operations: string[]) => ({ parserOptions: <ParserOptions>{ operations, }, }); const ruleTester = new GraphQLRuleTester(); ruleTester.runGraphQLTests('unique-operation-name', rule, { valid: [ {
code: `query test2 { foo }`, }, { // Compare filepath of code as real instead of virtual with siblings ...SIBLING_OPERATIONS(join(__dirname, 'mocks/unique-fragment.js')), filename: join(__dirname, 'mocks/unique-fragment.js/1_document.graphql'), code: /* GraphQL */ ` query User { user { ...UserFields } } `, }, ], invalid: [ { ...SIBLING_OPERATIONS(TEST_OPERATION), code: `query test { bar }`, errors: [{ messageId: 'UNIQUE_OPERATION_NAME' }], }, { ...SIBLING_OPERATIONS(TEST_OPERATION, `query test { bar2 }`), code: `query test { bar }`, errors: [{ messageId: 'UNIQUE_OPERATION_NAME' }], }, ], });
...SIBLING_OPERATIONS(TEST_OPERATION),
ss_idealization.py
from __future__ import division # LIBTBX_SET_DISPATCHER_NAME mmtbx.ss_idealization from mmtbx.secondary_structure import build as ssb import mmtbx.model import iotbx.pdb import os, sys def
(args): pdb_inp = iotbx.pdb.input(source_info=None, file_name=args[0]) model = mmtbx.model.manager( model_input=pdb_inp) params = ssb.master_phil.extract() params.ss_idealization.file_name_before_regularization="before_reg.pdb" params.ss_idealization.enabled=True rm = ssb.substitute_ss( model = model, params = params, log=sys.stdout) out_fname = "%s_ss_ideal.pdb" % os.path.basename(args[0]) txt = model.model_as_pdb() with open(out_fname, 'w') as f: f.write(txt) print("File saved: %s" % out_fname) print("All done.") if __name__ == "__main__": run(sys.argv[1:])
run
token-error-correct-3.rs
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Test that we do some basic error correction in the tokeniser (and don't spew // too many bogus errors). pub mod raw { use std::{io, fs}; use std::path::Path; pub fn ensure_dir_exists<P: AsRef<Path>, F: FnOnce(&Path)>(path: P, callback: F) -> io::Result<bool>
} fn main() {}
{ if !is_directory(path.as_ref()) { //~ ERROR: unresolved function `is_directory` //~^ NOTE: no resolution found callback(path.as_ref(); //~ NOTE: unclosed delimiter //~^ ERROR: expected one of fs::create_dir_all(path.as_ref()).map(|()| true) //~ ERROR: mismatched types //~^ expected (), found enum `std::result::Result` //~| expected type `()` //~| found type `std::result::Result<bool, std::io::Error>` } else { //~ ERROR: incorrect close delimiter: `}` //~^ ERROR: expected one of Ok(false); } panic!(); }
deploy_test.go
// Copyright 2019 OpenFaaS Author(s) // Licensed under the MIT license. See LICENSE file in the project root for full license information. package handlers import ( "testing" "github.com/openfaas/faas-netes/k8s" types "github.com/openfaas/faas-provider/types" "k8s.io/client-go/kubernetes/fake" apiv1 "k8s.io/api/core/v1" ) func Test_buildAnnotations_Empty_In_CreateRequest(t *testing.T)
func Test_buildAnnotations_Prometheus_NotOverridden(t *testing.T) { request := types.FunctionDeployment{Annotations: &map[string]string{"prometheus.io.scrape": "true"}} annotations := buildAnnotations(request) if len(annotations) != 1 { t.Errorf("want: %d annotations got: %d", 1, len(annotations)) } v, ok := annotations["prometheus.io.scrape"] if !ok { t.Errorf("missing prometheus.io.scrape key") } want := "true" if v != want { t.Errorf("want: %s for annotation prometheus.io.scrape got: %s", want, v) } } func Test_buildAnnotations_From_CreateRequest(t *testing.T) { request := types.FunctionDeployment{ Annotations: &map[string]string{ "date-created": "Wed 25 Jul 21:26:22 BST 2018", "foo": "bar", }, } annotations := buildAnnotations(request) if len(annotations) != 3 { t.Errorf("want: %d annotations got: %d", 3, len(annotations)) } v, ok := annotations["date-created"] if !ok { t.Errorf("missing date-created key") } if v != "Wed 25 Jul 21:26:22 BST 2018" { t.Errorf("want: %s for annotation date-created got: %s", "Wed 25 Jul 21:26:22 BST 2018", v) } } func Test_SetNonRootUser(t *testing.T) { scenarios := []struct { name string setNonRoot bool }{ {"does not set userid value when SetNonRootUser is false", false}, {"does set userid to constant value when SetNonRootUser is true", true}, } for _, s := range scenarios { t.Run(s.name, func(t *testing.T) { request := types.FunctionDeployment{Service: "testfunc", Image: "alpine:latest"} factory := k8s.NewFunctionFactory(fake.NewSimpleClientset(), k8s.DeploymentConfig{ LivenessProbe: &k8s.ProbeConfig{}, ReadinessProbe: &k8s.ProbeConfig{}, SetNonRootUser: s.setNonRoot, }) deployment, err := makeDeploymentSpec(request, map[string]*apiv1.Secret{}, factory) if err != nil { t.Errorf("unexpected makeDeploymentSpec error: %s", err.Error()) } functionContainer := deployment.Spec.Template.Spec.Containers[0] if functionContainer.SecurityContext == nil { t.Errorf("expected container %s to have a non-nil security context", functionContainer.Name) } if !s.setNonRoot && functionContainer.SecurityContext.RunAsUser != nil { t.Errorf("expected RunAsUser to be nil, got %d", *functionContainer.SecurityContext.RunAsUser) } if s.setNonRoot && *functionContainer.SecurityContext.RunAsUser != k8s.SecurityContextUserID { t.Errorf("expected RunAsUser to be %d, got %d", k8s.SecurityContextUserID, *functionContainer.SecurityContext.RunAsUser) } }) } } func Test_buildEnvVars_NoSortedKeys(t *testing.T) { inputEnvs := map[string]string{} function := types.FunctionDeployment{ EnvVars: inputEnvs, } coreEnvs := buildEnvVars(&function) if len(coreEnvs) != 0 { t.Errorf("want: %d env-vars, got: %d", 0, len(coreEnvs)) t.Fail() } } func Test_buildEnvVars_TwoSortedKeys(t *testing.T) { firstKey := "first" lastKey := "last" inputEnvs := map[string]string{ lastKey: "", firstKey: "", } function := types.FunctionDeployment{ EnvVars: inputEnvs, } coreEnvs := buildEnvVars(&function) if coreEnvs[0].Name != firstKey { t.Errorf("first want: %s, got: %s", firstKey, coreEnvs[0].Name) t.Fail() } } func Test_buildEnvVars_FourSortedKeys(t *testing.T) { firstKey := "alex" secondKey := "elliot" thirdKey := "stefan" lastKey := "zane" inputEnvs := map[string]string{ lastKey: "", firstKey: "", thirdKey: "", secondKey: "", } function := types.FunctionDeployment{ EnvVars: inputEnvs, } coreEnvs := buildEnvVars(&function) if coreEnvs[0].Name != firstKey { t.Errorf("first want: %s, got: %s", firstKey, coreEnvs[0].Name) t.Fail() } if coreEnvs[1].Name != secondKey { t.Errorf("second want: %s, got: %s", secondKey,
coreEnvs[1].Name) t.Fail() } if coreEnvs[2].Name != thirdKey { t.Errorf("third want: %s, got: %s", thirdKey, coreEnvs[2].Name) t.Fail() } if coreEnvs[3].Name != lastKey { t.Errorf("last want: %s, got: %s", lastKey, coreEnvs[3].Name) t.Fail() } }
{ request := types.FunctionDeployment{} annotations := buildAnnotations(request) if len(annotations) != 1 { t.Errorf("want: %d annotations got: %d", 1, len(annotations)) } v, ok := annotations["prometheus.io.scrape"] if !ok { t.Errorf("missing prometheus.io.scrape key") } want := "false" if v != want { t.Errorf("want: %s for annotation prometheus.io.scrape got: %s", want, v) } }
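// Sketch of the buildAnnotations under test, reconstructed from the
// expectations above (an assumption; the real implementation lives elsewhere
// in the package):
//
//   func buildAnnotations(request types.FunctionDeployment) map[string]string {
//       annotations := map[string]string{}
//       if request.Annotations != nil {
//           for k, v := range *request.Annotations {
//               annotations[k] = v // caller-supplied values win
//           }
//       }
//       if _, ok := annotations["prometheus.io.scrape"]; !ok {
//           annotations["prometheus.io.scrape"] = "false" // default, not overridden
//       }
//       return annotations
//   }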
abi.rs
use crate::build_solidity; #[test] fn
() { let mut vm = build_solidity( r#" struct s { int32 f1; uint8 f2; string f3; uint16[2] f4; } contract bar { function test() public { uint16 a = 0xfd01; assert(abi.encodePacked(a) == hex"fd01"); uint32 b = 0xaabbccdd; assert(abi.encodePacked(true, b, false) == hex"01aabbccdd00"); } function test2() public { string b = "foobar"; assert(abi.encodePacked(b) == "foobar"); assert(abi.encodePacked("foobar") == "foobar"); assert(abi.encodePacked("foo", "bar") == "foobar"); } function test3() public { s x = s({ f1: 511, f2: 0xf7, f3: "testie", f4: [ 4, 5 ] }); assert(abi.encodePacked(x) == hex"000001fff774657374696500040005"); } }"#, ); vm.constructor("bar", &[]); vm.function("test", &[]); vm.function("test2", &[]); vm.function("test3", &[]); }
packed
main.rs
extern crate pretty_env_logger; #[macro_use] extern crate log; extern crate failure; extern crate actix; extern crate actix_ogn; extern crate actix_web; extern crate futures; extern crate chrono; extern crate r2d2_redis; extern crate regex; #[macro_use] extern crate lazy_static; extern crate itertools; extern crate sentry; extern crate serde; #[macro_use] extern crate serde_derive; extern crate bincode; extern crate serde_json; extern crate systemstat; #[cfg(test)] #[macro_use] extern crate approx; use actix::*; use actix_ogn::OGNActor; use actix_web::server::HttpServer; use r2d2_redis::RedisConnectionManager;
use std::env; mod api; mod app; mod gateway; mod geo; mod ogn; mod ogn_ddb; mod redis; mod units; mod ws_client; use app::build_app; use gateway::Gateway; use ogn_ddb::OGNDevicesUpdater; use redis::RedisExecutor; const REDIS_WORKERS: usize = 7; fn main() { // reads sentry DSN from `SENTRY_DSN` environment variable let _sentry = sentry::init(()); sentry::integrations::panic::register_panic_handler(); setup_logging(); let redis_url = env::var("REDIS_URL").expect("REDIS_URL must be set"); let redis_url = r2d2_redis::redis::parse_redis_url(&redis_url).unwrap(); let sys = actix::System::new("ogn-web-gateway"); let redis_connection_manager = RedisConnectionManager::new(redis_url).unwrap(); let redis_pool = r2d2_redis::r2d2::Pool::builder() .build(redis_connection_manager) .unwrap(); let redis_executor_addr = SyncArbiter::start(REDIS_WORKERS, move || { RedisExecutor::new(redis_pool.clone()) }); let updater_redis_addr = redis_executor_addr.clone(); let _ogn_device_updater_addr = Arbiter::start(|_| OGNDevicesUpdater { redis: updater_redis_addr, }); // Start "gateway" actor in separate thread let gateway_redis_addr = redis_executor_addr.clone(); let gateway: Addr<_> = Arbiter::start(|_| Gateway::new(gateway_redis_addr)); // Start OGN client in separate thread let gw = gateway.clone(); let _ogn_addr: Addr<_> = Supervisor::start(|_| OGNActor::new(gw.recipient())); // Create Http server with websocket support HttpServer::new(move || build_app(redis_executor_addr.clone(), gateway.clone())) .bind("127.0.0.1:8080") .unwrap() .start(); sys.run(); } fn setup_logging() { let mut log_builder = pretty_env_logger::formatted_builder().unwrap(); if let Ok(s) = env::var("RUST_LOG") { log_builder.parse(&s); } let logger = log_builder.build(); let options = sentry::integrations::log::LoggerOptions { global_filter: Some(logger.filter()), ..Default::default() }; sentry::integrations::log::init(Some(Box::new(logger)), options); }
testcase.py
# coding: utf-8 # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- import functools from devtools_testutils import AzureTestCase, PowerShellPreparer from azure.purview.administration.account import PurviewAccountClient from azure.purview.administration.metadatapolicies import PurviewMetadataPoliciesClient class PurviewAccountTest(AzureTestCase): def __init__(self, method_name, **kwargs): super(PurviewAccountTest, self).__init__(method_name, **kwargs) def create_client(self, endpoint): credential = self.get_credential(PurviewAccountClient) return self.create_client_from_credential( PurviewAccountClient, credential=credential, endpoint=endpoint, ) PurviewAccountPowerShellPreparer = functools.partial( PowerShellPreparer, "purviewaccount",
class PurviewMetaPolicyTest(AzureTestCase): def __init__(self, method_name, **kwargs): super(PurviewMetaPolicyTest, self).__init__(method_name, **kwargs) def create_client(self, endpoint): credential = self.get_credential(PurviewMetadataPoliciesClient) return self.create_client_from_credential( PurviewMetadataPoliciesClient, credential=credential, endpoint=endpoint, ) PurviewMetaPolicyPowerShellPreparer = functools.partial( PowerShellPreparer, "purviewmetapolicy", purviewmetapolicy_endpoint="https://fake_account.account.purview.azure.com" )
purviewaccount_endpoint="https://fake_account.account.purview.azure.com" )
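For orientation, the two test bases above follow the usual `devtools_testutils` pattern: the `PowerShellPreparer` partials resolve their `*_endpoint` values (from environment variables, or the fake values during playback) and inject them into test methods as keyword arguments. A minimal usage sketch under that assumption; the test class and method names below are illustrative, not part of this file:

# Hypothetical usage sketch, assuming the devtools_testutils convention that
# the preparer passes "purviewaccount_endpoint" into the test as a kwarg.
from testcase import PurviewAccountTest, PurviewAccountPowerShellPreparer


class PurviewAccountSmokeTest(PurviewAccountTest):
    @PurviewAccountPowerShellPreparer()
    def test_create_client(self, purviewaccount_endpoint):
        # create_client() wires the framework credential to the endpoint
        client = self.create_client(endpoint=purviewaccount_endpoint)
        assert client is not None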
setup.py
""" Linux on Hyper-V and Azure Test Code, ver. 1.0.0 Copyright (c) Microsoft Corporation All rights reserved Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. See the Apache Version 2.0 License for specific language governing permissions and limitations under the License. """ import os import sys import time import logging from utils import constants from utils.cmdshell import SSHClient from report.db_utils import upload_results from paramiko.ssh_exception import NoValidConnectionsError from providers.amazon_service import AWSConnector from providers.azure_service import AzureConnector from providers.gcp_service import GCPConnector logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s', datefmt='%y/%m/%d %H:%M:%S', level=logging.INFO) log = logging.getLogger(__name__) class SetupTestEnv: """ Setup test environment. """ def __init__(self, provider=None, vm_count=None, test_type=None, disk_size=None, raid=None, keyid=None, secret=None, token=None, subscriptionid=None, tenantid=None, projectid=None, imageid=None, instancetype=None, user=None, localpath=None, region=None, zone=None, sriov=False, kernel=None): """ Init AWS connector to create and configure AWS ec2 instances. :param provider Service provider to be used e.g. azure, aws, gce. :param vm_count: Number of VMs to prepare :param test_type: vm_disk > 1 VM with disk (Orion and Sysbench) no_disk > No disk attached (Redis, Memcached, Apache_bench) db_disk > Second VM with disk (MariaDB, MongoDB) cluster_disk > All VMs have disks (Terasort) :param disk_size: :param raid: Bool or Int (the number of disks), to specify if a RAID will be configured :param keyid: user key for executing remote connection :param secret: user secret for executing remote connection :param token: GCE refresh token obtained with gcloud sdk :param subscriptionid: Azure specific subscription id :param tenantid: Azure specific tenant id :param projectid: GCE specific project id :param imageid: AWS OS AMI image id or Azure image references offer and sku: e.g. 'UbuntuServer#16.04.0-LTS' or GCE image family, e.g. 'ubuntu-1604-lts' :param instancetype: AWS instance resource type e.g 'd2.4xlarge' or Azure hardware profile vm size e.g. 'Standard_DS14_v2' or GCE instance size e.g. 
'n1-highmem-16' :param user: remote ssh user for the instance :param localpath: localpath where the logs should be downloaded, and the default path for other necessary tools :param region: region to connect to :param zone: zone where other resources should be available :param sriov: bool for configuring SR-IOV or not :param kernel: kernel deb name to install provided in localpath :rtype Tuple :return: connector <Connector>, vm_ips <VM private IPs dict>, device <attached disk devices>, ssh_client <ssh clients dict> """ self.provider = provider self.vm_count = vm_count self.test_type = test_type self.disk_size = disk_size self.raid = raid self.keyid = keyid self.secret = secret self.token = token self.subscriptionid = subscriptionid self.tenantid = tenantid self.projectid = projectid self.imageid = imageid self.instancetype = instancetype self.user = user self.localpath = localpath self.region = region self.zone = zone self.sriov = sriov self.kernel = kernel # create and generate setup details try: self.connector = self.create_connector() self.vms = self.create_instances() self.device = self.get_disk_devices() self.ssh_client, self.vm_ips = self.get_instance_details() self.perf_tuning() self.reconnect_sshclient() except Exception as e: log.exception(e) if self.connector: self.connector.teardown() raise def create_connector(self): """ Create connector by provider. :return: connector """ connector = None if self.provider == constants.AWS: connector = AWSConnector(keyid=self.keyid, secret=self.secret, imageid=self.imageid, instancetype=self.instancetype, user=self.user, localpath=self.localpath, region=self.region, zone=self.zone) elif self.provider == constants.AZURE: connector = AzureConnector(clientid=self.keyid, secret=self.secret, subscriptionid=self.subscriptionid, tenantid=self.tenantid, imageid=self.imageid, instancetype=self.instancetype, user=self.user, localpath=self.localpath, location=self.region, sriov=self.sriov) elif self.provider == constants.GCE: connector = GCPConnector(clientid=self.keyid, secret=self.secret, token=self.token, projectid=self.projectid, imageid=self.imageid, instancetype=self.instancetype, user=self.user, localpath=self.localpath, zone=self.zone) if connector: connector.connect() return connector else: raise Exception('Unsupported provider or connector failed.') def create_instances(self): """ Create instances. 
:return: VM instances """ open(self.connector.host_key_file, 'w').close() vms = {} for i in xrange(1, self.vm_count + 1): vms[i] = self.connector.create_vm() return vms def reconnect_sshclient(self): if self.provider == constants.AWS: log.info('The provider is AWS, reconnect sshclient') for i in xrange(1, self.vm_count + 1): self.ssh_client[i].connect() def get_instance_details(self): """ Create ssh client and get vm IPs :return: ssh_client, vm_ips """ ssh_client = {} vm_ips = {} for i in xrange(1, self.vm_count + 1): if self.provider == constants.AWS: ssh_client[i] = self.connector.wait_for_ping(self.vms[i]) # SRIOV is enabled by default on AWS for the tested platforms # if sriov == constants.ENABLED: # ssh_client[i] = connector.enable_sr_iov(vms[i], ssh_client[i]) self.vms[i].update() vm_ips[i] = self.vms[i].private_ip_address elif self.provider == constants.AZURE: ssh_client[i] = SSHClient(server=self.vms[i].name + self.connector.dns_suffix, host_key_file=self.connector.host_key_file, user=self.connector.user, ssh_key_file=os.path.join( self.connector.localpath, self.connector.key_name + '.pem')) ip = ssh_client[i].run( 'ifconfig eth0 | grep "inet" | cut -d: -f2 | awk -F " " \'{print $2}\' | head -n 1') log.info('vm ip {}'.format(ip)) vm_ips[i] = ip[1].strip() log.info('--vm ips i {} {}'.format(i, vm_ips[i])) elif self.provider == constants.GCE: ssh_client[i] = self.connector.wait_for_ping(self.vms[i]) vm_ips[i] = self.vms[i]['networkInterfaces'][0]['networkIP'] return ssh_client, vm_ips def attach_raid_disks(self, vm_tag, disk_args): device = [] for i in xrange(self.raid): if self.provider == constants.AWS: disk_args['device'] = '/dev/sd{}'.format(chr(120 - i)) '''device.append(disk_args['device'].replace('sd', 'xvd'))''' device.append('/dev/nvme{}n1'.format(i)) elif self.provider == constants.AZURE: disk_args['device'] = i device.append('/dev/sd{}'.format(chr(99 + i))) elif self.provider == constants.GCE: device.append('/dev/sd{}'.format(chr(98 + i))) self.connector.attach_disk(self.vms[vm_tag], disk_size=self.disk_size, **disk_args) return device def get_disk_devices(self): if not self.test_type: return None device = None disk_args = {} if self.provider == constants.AWS: device = constants.DEVICE_AWS.replace('sd', 'xvd') disk_args['iops'] = 5000 disk_args['volume_type'] = self.connector.volume_type['ssd_io1'] disk_args['device'] = constants.DEVICE_AWS elif self.provider == constants.AZURE: device = constants.DEVICE_AZURE elif self.provider == constants.GCE: # Note: using disk device order prediction,GCE API is not consistent in the disk naming # device = constants.DEVICE_GCE + disk_name device = constants.TEMP_DEVICE_GCE if self.test_type == constants.CLUSTER_DISK: self.connector.attach_disk(self.vms[1], disk_size=self.disk_size + 200, **disk_args) for i in xrange(2, self.vm_count + 1): self.connector.attach_disk(self.vms[i], disk_size=self.disk_size, **disk_args) time.sleep(3) return device vm_tag = None if self.test_type == constants.VM_DISK: vm_tag = 1 elif self.test_type == constants.DB_DISK: vm_tag = 2 if self.raid and type(self.raid) is int: return self.attach_raid_disks(vm_tag, disk_args) else: self.connector.attach_disk(self.vms[vm_tag], disk_size=self.disk_size, **disk_args) return device def perf_tuning(self): current_path = os.path.dirname(sys.modules['__main__'].__file__) for i in range(1, self.vm_count + 1): log.info('Running perf tuning on {}'.format(self.vm_ips[i])) self.ssh_client[i].connect() self.ssh_client[i].put_file(os.path.join(current_path, 'tests', 
'perf_tuning.sh'), '/tmp/perf_tuning.sh') self.ssh_client[i].run('chmod +x /tmp/perf_tuning.sh') self.ssh_client[i].run("sed -i 's/\r//' /tmp/perf_tuning.sh") params = [self.provider] if '.deb' in self.kernel: log.info('Uploading kernel {} on {}'.format(self.kernel, self.vm_ips[i])) self.ssh_client[i].put_file(os.path.join(self.localpath, self.kernel), '/tmp/{}'.format(self.kernel)) params.append('/tmp/{}'.format(self.kernel)) log.info('Run perf_tuning.sh {}'.format(' '.join(params))) self.ssh_client[i].run('/tmp/perf_tuning.sh {}'.format(' '.join(params))) if self.provider in [constants.AWS, constants.GCE]: self.ssh_client[i] = self.connector.restart_vm(self.vms[i]) elif self.provider == constants.AZURE: self.vms[i] = self.connector.restart_vm(self.vms[i].name) # TODO add custom kernel support for all providers - only azure support self.ssh_client[i] = SSHClient(server=self.vms[i].name + self.connector.dns_suffix, host_key_file=self.connector.host_key_file, user=self.connector.user, ssh_key_file=os.path.join( self.connector.localpath, self.connector.key_name + '.pem')) ip = self.ssh_client[i].run( 'ifconfig eth0 | grep "inet" | cut -d: -f2 | awk -F " " \'{print $2}\' | head -n 1') self.vm_ips[i] = ip[1].strip() def run_test(self, ssh_vm_conf=0, testname=None, test_cmd=None, results_path=None, raid=False, ssh_raid=1, timeout=constants.TIMEOUT): try: if all(client is not None for client in self.ssh_client.values()): current_path = os.path.dirname(sys.modules['__main__'].__file__) # enable key auth between instances for i in xrange(1, ssh_vm_conf + 1): self.ssh_client[i].put_file(os.path.join(self.localpath, self.connector.key_name + '.pem'), '/home/{}/.ssh/id_rsa'.format(self.user)) self.ssh_client[i].run('chmod 0600 /home/{0}/.ssh/id_rsa'.format(self.user)) if raid: self.ssh_client[ssh_raid].put_file(os.path.join( current_path, 'tests', 'raid.sh'), '/tmp/raid.sh') self.ssh_client[ssh_raid].run('chmod +x /tmp/raid.sh') self.ssh_client[ssh_raid].run("sed -i 's/\r//' /tmp/raid.sh") self.ssh_client[ssh_raid].run('/tmp/raid.sh 0 {} {}'.format(raid, ' '.join( self.device))) print(self.device) bash_testname = 'run_{}.sh'.format(testname) self.ssh_client[1].put_file(os.path.join(current_path, 'tests', bash_testname), '/tmp/{}'.format(bash_testname)) self.ssh_client[1].run('chmod +x /tmp/{}'.format(bash_testname)) self.ssh_client[1].run("sed -i 's/\r//' /tmp/{}".format(bash_testname)) log.info('Starting background command {}'.format(test_cmd)) channel = self.ssh_client[1].run_pty(test_cmd) _, pid, _ = self.ssh_client[1].run( "ps aux | grep -v grep | grep {} | awk '{{print $2}}'".format( bash_testname)) self._wait_for_pid(self.ssh_client[1], bash_testname, pid, timeout=timeout) channel.close() self.ssh_client[1].get_file('/tmp/{}.zip'.format(testname), results_path) except Exception as e: log.exception(e) raise finally: if self.connector: self.connector.teardown() @staticmethod def _wait_for_pid(ssh_client, bash_testname, pid, timeout=constants.TIMEOUT):
    def run_test_nohup(self, ssh_vm_conf=0, test_cmd=None, timeout=constants.TIMEOUT,
                       track=None):
        try:
            if all(client is not None for client in self.ssh_client.values()):
                # enable key auth between instances
                for i in xrange(1, ssh_vm_conf + 1):
                    self.ssh_client[i].put_file(os.path.join(self.localpath,
                                                             self.connector.key_name + '.pem'),
                                                '/home/{}/.ssh/id_rsa'.format(self.user))
                    self.ssh_client[i].run('chmod 0600 /home/{0}/.ssh/id_rsa'.format(self.user))
                log.info('Starting nohup command {}'.format(test_cmd))
                self.ssh_client[1].run(test_cmd)
                self._wait_for_command(self.ssh_client[1], track, timeout=timeout)
        except Exception as e:
            log.exception(e)
            raise
        finally:
            log.info('Finished running nohup command {}'.format(test_cmd))

    @staticmethod
    def _wait_for_command(ssh_client, track, timeout=constants.TIMEOUT):
        t = 0
        while t < timeout:
            try:
                _, p_count, _ = ssh_client.run(
                    "ps aux | grep -v grep | grep {} | awk '{{print $2}}' | wc -l".format(
                        track))
                if int(p_count) == 0:
                    return
            except NoValidConnectionsError:
                log.debug('NoValidConnectionsError, will retry in 60 seconds')
            time.sleep(60)
            t += 60
        raise Exception('Timeout of {}s waiting for the tracked process to end.'.format(timeout))
        t = 0
        while t < timeout:
            try:
                _, new_pid, _ = ssh_client.run(
                    "ps aux | grep -v grep | grep {} | awk '{{print $2}}'".format(
                        bash_testname))
                if new_pid != pid:
                    return
            except NoValidConnectionsError:
                log.debug('NoValidConnectionsError, will retry in 60 seconds')
            time.sleep(60)
            t += 60
        raise Exception('Timeout of {}s waiting for the test process to end.'.format(timeout))
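As a quick aside, a minimal driver sketch for the `SetupTestEnv` class above, mirroring the parameters documented in its `__init__` docstring. All credential, image, and path values here are placeholders, and the test name is illustrative:

# Hypothetical driver sketch; every credential/image/path value is a placeholder.
from utils import constants
from setup import SetupTestEnv

env = SetupTestEnv(provider=constants.AZURE, vm_count=2, test_type=constants.DB_DISK,
                   disk_size=100, raid=False, keyid='client-id', secret='client-secret',
                   subscriptionid='subscription-id', tenantid='tenant-id',
                   imageid='UbuntuServer#16.04.0-LTS', instancetype='Standard_DS14_v2',
                   user='ubuntu', localpath='/tmp/logs', region='westus2',
                   sriov=False, kernel='')

# run_test() uploads tests/run_<testname>.sh to the first VM, waits for it to exit,
# downloads /tmp/<testname>.zip into results_path, and tears the VMs down.
env.run_test(ssh_vm_conf=2, testname='mariadb',
             test_cmd='/tmp/run_mariadb.sh {}'.format(env.vm_ips[2]),
             results_path='/tmp/results')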
data.rs
//! Utilities related to the transfer of data from one place to another. Handles the conversion of data from one format
//! to another and from one storage type to another.

use std::fs::{File, OpenOptions};
use std::path::Path;
use std::io::prelude::*;
use std::io::BufReader;
use std::fmt;
use std::thread;

use serde_json;
use redis;
use libc::{uint64_t, c_double};
use postgres::Connection;

use transport::commands::HistTickDst;
use transport::redis::get_client as get_redis_client;
use transport::postgres::get_client as get_postgres_client;
use transport::postgres::init_hist_data_table;
use transport::query_server::QueryServer;
use transport::command_server::CommandServer;
use trading::tick::Tick;
use conf::CONF;

// TODO: Some kind of drop implementation that automatically clears the buffers when they're dropped

/// Initializes the transfer of data from one `HistTickDst` location to another. The source is wrapped in a
/// `HistTickGen`; data is read into an internal buffer within the generator and then written into the sink.
pub fn transfer_data(src: HistTickDst, dst: HistTickDst, cs: CommandServer) {
    thread::spawn(move || {
        let tx_iterator = get_tx_iterator(src, cs);
        let mut rx_closure = get_rx_closure(dst).unwrap();

        for tick in tx_iterator {
            rx_closure(tick);
        }
    });
}

/// Given a `HistTickDst`, returns a closure that can be used as a receiver callback.
pub fn get_rx_closure(dst: HistTickDst) -> Result<RxCallback, String> {
    let cb = match dst.clone() {
        HistTickDst::Console => {
            let inner = |t: Tick| {
                println!("{:?}", t);
            };

            RxCallback{
                dst: dst,
                inner: Box::new(inner),
            }
        },
        HistTickDst::RedisChannel{host, channel} => {
            let client = get_redis_client(host.as_str());
            // buffer up 5000 ticks in memory and send all at once to avoid issues
            // with persistent redis connections taking up lots of ports
            let mut buffer: Vec<String> = Vec::with_capacity(5000);

            let inner = move |t: Tick| {
                let client = &client;
                let tick_string = serde_json::to_string(&t).unwrap();
                buffer.push(tick_string);

                // Send all buffered ticks once the buffer is full
                if buffer.len() >= 5000 {
                    let mut pipe = redis::pipe();
                    for item in buffer.drain(..) {
                        pipe.cmd("PUBLISH")
                            .arg(&channel)
                            .arg(item);
                    }
                    pipe.execute(client);
                }
            };

            RxCallback {
                dst: dst,
                inner: Box::new(inner),
            }
        },
        HistTickDst::RedisSet{host, set_name} => {
            let client = get_redis_client(host.as_str());
            // buffer up 5000 ticks in memory and send all at once to avoid issues
            // with persistent redis connections taking up lots of ports
            let mut buffer: Vec<String> = Vec::with_capacity(5000);

            let inner = move |t: Tick| {
                let client = &client;
                let tick_string = serde_json::to_string(&t).unwrap();
                buffer.push(tick_string);

                // Send all buffered ticks once the buffer is full
                if buffer.len() >= 5000 {
                    let mut pipe = redis::pipe();
                    for item in buffer.drain(..)
{ pipe.cmd("SADD") .arg(&set_name) .arg(item); } pipe.execute(client); } }; RxCallback { dst: dst, inner: Box::new(inner), } }, HistTickDst::Flatfile{filename} => { let fnc = filename.clone(); let path = Path::new(&fnc); // create the file if it doesn't exist if !path.exists() { let _ = File::create(path).unwrap(); } // try to open the specified filename in append mode let file_opt = OpenOptions::new().append(true).open(path); if file_opt.is_err() { return Err(format!("Unable to open file with path {}", filename)); } let mut file = file_opt.unwrap(); let inner = move |t: Tick| { let tick_string = t.to_csv_row(); file.write_all(tick_string.as_str().as_bytes()) .expect(format!("couldn't write to output file: {}, {}", filename, tick_string).as_str()); }; RxCallback { dst: dst, inner: Box::new(inner), } }, HistTickDst::Postgres{table} => { let connection_opt = get_postgres_client(); if connection_opt.is_err() { return Err(String::from("Unable to connect to PostgreSQL!")) } let connection = connection_opt.unwrap(); try!(init_hist_data_table(table.as_str(), &connection, CONF.postgres_user)); let mut qs = QueryServer::new(10); let mut inner_buffer = Vec::with_capacity(5000); let inner = move |t: Tick| { let val = format!("({}, {}, {})", t.timestamp, t.bid, t.ask); inner_buffer.push(val); if inner_buffer.len() > 4999 { let mut query = String::from(format!("INSERT INTO {} (tick_time, bid, ask) VALUES ", table)); let values = inner_buffer.as_slice().join(", "); query += &values; query += ";"; qs.execute(query); inner_buffer.clear(); } }; RxCallback { dst: dst, inner: Box::new(inner), } }, }; Ok(cb) } /// A struct that functions as a callback for ticks in a generator. pub struct RxCallback { dst: HistTickDst, inner: Box<FnMut(Tick)>, } impl FnOnce<(Tick,)> for RxCallback { type Output = (); extern "rust-call" fn call_once(self, args: (Tick,)) { let mut inner = self.inner; inner(args.0) } } impl FnMut<(Tick,)> for RxCallback { extern "rust-call" fn call_mut(&mut self, args: (Tick,)) { (*self.inner)(args.0) } } impl fmt::Debug for RxCallback { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "RxCallback: {:?}", self.dst) } } pub struct TxCallback { inner: Box<FnMut(uint64_t, c_double, c_double)>, } impl FnOnce<(uint64_t, c_double, c_double,)> for TxCallback { type Output = (); extern "rust-call" fn call_once(self, args: (uint64_t, c_double, c_double,)) { let mut inner = self.inner; inner(args.0, args.1, args.2) } } impl FnMut<(uint64_t, c_double, c_double,)> for TxCallback { extern "rust-call" fn call_mut(&mut self, args: (uint64_t, c_double, c_double,)) { (*self.inner)(args.0, args.1, args.2) } } fn get_tx_iterator(src: HistTickDst, cs: CommandServer) -> Box<HistTickGen> { match src { HistTickDst::Flatfile{filename} => { Box::new(FlatfileReader::new(filename, cs)) }, HistTickDst::Postgres{table} => { Box::new(PostgresReader::new(table.to_string(), cs)) } _ => unimplemented!(), } } /// This trait is implemented by objects that generate historical ticks from stored source. pub trait HistTickGen { /// Signal the tick generator to populate its internal tick buffer. fn populate_buffer(&mut self) -> Result<(), String>; /// Gets a mutable reference to the generator's internal tick buffer. 
fn get_buffer(&mut self) -> &mut Vec<Tick>;

    /// Tick generators must provide access to a `CommandServer` for logging purposes
    fn get_cs(&mut self) -> &mut CommandServer;
}

impl Iterator for HistTickGen {
    type Item = Tick;

    fn next(&mut self) -> Option<Tick> {
        if self.get_buffer().is_empty() {
            match self.populate_buffer() {
                Ok(_) => (),
                Err(err) => self.get_cs().error(Some("Tick Loading"), &format!("Error while loading ticks into buffer: {}", err)),
            };
        }
        self.get_buffer().pop()
    }
}

/// A historical tick reader that draws upon a CSV file as a data source
struct FlatfileReader {
    buffer: Vec<Tick>,
    buf_reader: BufReader<File>,
    cs: CommandServer,
}

impl HistTickGen for FlatfileReader {
    fn get_buffer(&mut self) -> &mut Vec<Tick> {
        &mut self.buffer
    }

    /// Reads lines out of the file to fill the buffer
    fn populate_buffer(&mut self) -> Result<(), String> {
        assert_eq!(self.buffer.len(), 0);
        for _ in 0..500 {
            let mut buf = String::new();
            // stop at EOF so the iterator terminates instead of parsing empty lines
            match self.buf_reader.read_line(&mut buf) {
                Ok(0) => break,
                Ok(_) => self.buffer.push(Tick::from_csv_string(&buf)),
                Err(err) => return Err(format!("{:?}", err)),
            }
        }
        Ok(())
    }

    fn
(&mut self) -> &mut CommandServer {
        &mut self.cs
    }
}

impl FlatfileReader {
    pub fn new(filename: String, cs: CommandServer) -> FlatfileReader {
        let path = Path::new(&filename);
        let file = File::open(path).expect(&format!("Unable to open file at {:?}", path));
        let mut reader = BufReader::new(file);
        // skip header row
        let _ = reader.read_line(&mut String::new());
        FlatfileReader {
            buf_reader: reader,
            buffer: Vec::with_capacity(500),
            cs: cs,
        }
    }
}

/// A historical tick generator that draws upon a PostgreSQL table as its data source
struct PostgresReader {
    buffer: Vec<Tick>,
    conn: Connection,
    last_timestamp: usize,
    table_name: String,
    cs: CommandServer,
}

impl HistTickGen for PostgresReader {
    fn get_buffer(&mut self) -> &mut Vec<Tick> {
        &mut self.buffer
    }

    /// Queries the database and populates the buffer with the next batch of rows
    fn populate_buffer(&mut self) -> Result<(), String> {
        assert_eq!(self.buffer.len(), 0);
        // select the columns individually (not as a single composite value) and order by
        // timestamp so that `last_timestamp` can be advanced past this batch
        let query = format!(
            "SELECT tick_time, bid, ask FROM {} WHERE tick_time > {} ORDER BY tick_time LIMIT 500;",
            self.table_name, self.last_timestamp
        );
        let rows = self.conn.query(&query, &[]).map_err(|x| format!("{:?}", x))?;

        for row in rows.iter() {
            let t = Tick {
                timestamp: row.get::<usize, i64>(0) as u64,
                bid: row.get::<usize, i64>(1) as usize,
                ask: row.get::<usize, i64>(2) as usize,
            };
            // advance the cursor so the next query starts after this batch
            self.last_timestamp = t.timestamp as usize;
            // push onto the (empty) buffer; indexing into it would panic
            self.buffer.push(t);
        }
        Ok(())
    }

    fn get_cs(&mut self) -> &mut CommandServer {
        &mut self.cs
    }
}

impl PostgresReader {
    pub fn new(table_name: String, cs: CommandServer) -> PostgresReader {
        let conn = get_postgres_client();
        PostgresReader {
            buffer: Vec::with_capacity(500),
            conn: conn.unwrap(),
            last_timestamp: 0,
            table_name: table_name,
            cs: cs,
        }
    }
}
get_cs
integrationtestnet.go
package networks import ( "github.com/filecoin-project/go-address" "github.com/filecoin-project/go-state-types/abi" "github.com/filecoin-project/venus/pkg/config" ) func
() *NetworkConf { return &NetworkConf{ Bootstrap: config.BootstrapConfig{ Addresses: []string{}, MinPeerThreshold: 0, Period: "30s", }, Network: config.NetworkParamsConfig{ BlockDelay: 30, ConsensusMinerMinPower: 10 << 40, ForkUpgradeParam: &config.ForkUpgradeConfig{ UpgradeBreezeHeight: 41280, UpgradeSmokeHeight: 51000, UpgradeIgnitionHeight: 94000, UpgradeRefuelHeight: 130800, UpgradeAssemblyHeight: 138720, UpgradeTapeHeight: 140760, UpgradeLiftoffHeight: 148888, UpgradeKumquatHeight: 170000, UpgradePriceListOopsHeight: 265199, UpgradeCalicoHeight: 265200, UpgradePersianHeight: 265200 + (120 * 60), UpgradeOrangeHeight: 336458, UpgradeTrustHeight: 550321, UpgradeNorwegianHeight: 665280, UpgradeTurboHeight: 712320, UpgradeHyperdriveHeight: 892800, BreezeGasTampingDuration: 120, UpgradeClausHeight: 343200, }, DrandSchedule: map[abi.ChainEpoch]config.DrandEnum{0: 5, 51000: 1}, AddressNetwork: address.Testnet, PreCommitChallengeDelay: abi.ChainEpoch(150), }, } }
IntegrationNet
good.d.ts
declare module 'good';
ionic.min.js
/*!
* http://drifty.com/ * * Ionic, v1.3.1-nightly-4219 * A powerful HTML5 mobile app framework. * http://ionicframework.com/ * * By @maxlynch, @benjsperry, @adamdbradley <3 * * Licensed under the MIT license. Please see LICENSE for more information. * */ !function(){function e(e,t,n){t!==!1?X.addEventListener(e,J[e],n):X.removeEventListener(e,J[e])}function t(e){var t=w(e.target),i=T(t);if(ionic.tap.requiresNativeClick(i)||$)return!1;var r=ionic.tap.pointerCoord(e);n("click",i,r.x,r.y),h(i)}function n(e,t,n,i){var r=document.createEvent("MouseEvents");r.initMouseEvent(e,!0,!0,window,1,0,0,n,i,!1,!1,!1,!1,0,null),r.isIonicTap=!0,t.dispatchEvent(r)}function i(e){return"submit"==e.target.type&&0===e.detail?null:ionic.scroll.isScrolling&&ionic.tap.containsOrIsTextInput(e.target)||!e.isIonicTap&&!ionic.tap.requiresNativeClick(e.target)?(e.stopPropagation(),ionic.tap.isLabelWithTextInput(e.target)||e.preventDefault(),!1):void 0}function r(t){return t.isIonicTap||p(t)?null:B?(t.stopPropagation(),ionic.Platform.isEdge()||ionic.tap.isTextInput(t.target)&&K===t.target||b(t.target.tagName)||ionic.tap.isVideo(t.target)||t.preventDefault(),!1):($=!1,q=ionic.tap.pointerCoord(t),e("mousemove"),void ionic.activator.start(t))}function a(n){return B?(n.stopPropagation(),n.preventDefault(),!1):p(n)||b(n.target.tagName)?!1:(_(n)||t(n),e("mousemove",!1),ionic.activator.end(),void($=!1))}function o(t){return _(t)?(e("mousemove",!1),ionic.activator.end(),$=!0,!1):void 0}function s(t){if(!p(t)&&($=!1,u(),q=ionic.tap.pointerCoord(t),e(j),ionic.activator.start(t),ionic.Platform.isIOS()&&ionic.tap.isLabelWithTextInput(t.target))){var n=T(w(t.target));n!==F&&t.preventDefault()}}function l(e){p(e)||(u(),_(e)||(t(e),b(e.target.tagName)&&e.preventDefault()),K=e.target,d())}function c(t){return _(t)?($=!0,e(j,!1),ionic.activator.end(),!1):void 0}function d(){e(j,!1),ionic.activator.end(),$=!1}function u(){B=!0,clearTimeout(W),W=setTimeout(function(){B=!1},600)}function p(e){return e.isTapHandled?!0:(e.isTapHandled=!0,ionic.tap.isElementTapDisabled(e.target)?!0:ionic.scroll.isScrolling&&ionic.tap.containsOrIsTextInput(e.target)?(e.preventDefault(),!0):void 0)}function h(e){U=null;var t=!1;"SELECT"==e.tagName?(n("mousedown",e,0,0),e.focus&&e.focus(),t=!0):v()===e?t=!0:/^(input|textarea|ion-label)$/i.test(e.tagName)||e.isContentEditable?(t=!0,e.focus&&e.focus(),e.value=e.value,B&&(U=e)):f(),t&&(v(e),ionic.trigger("ionic.focusin",{target:e},!0))}function f(){var e=v();e&&(/^(input|textarea|select)$/i.test(e.tagName)||e.isContentEditable)&&e.blur(),v(null)}function m(e){B&&ionic.tap.isTextInput(v())&&ionic.tap.isTextInput(U)&&U!==e.target&&(U.focus(),U=null),ionic.scroll.isScrolling=!1}function g(){v(null)}function v(e){return arguments.length&&(F=e),F||document.activeElement}function _(e){if(!e||1!==e.target.nodeType||!q||0===q.x&&0===q.y)return!1;var t=ionic.tap.pointerCoord(e),n=!(!e.target.classList||!e.target.classList.contains||"function"!=typeof e.target.classList.contains),i=n&&e.target.classList.contains("button")?Q:Z;return Math.abs(q.x-t.x)>i||Math.abs(q.y-t.y)>i}function w(e,t){for(var n=e,i=0;6>i&&n;i++){if("LABEL"===n.tagName)return n;n=n.parentElement}return t!==!1?e:void 0}function T(e){if(e&&"LABEL"===e.tagName){if(e.control)return e.control;if(e.querySelector){var t=e.querySelector("input,textarea,select");if(t)return t}}return e}function b(e){return/^(select|option)$/i.test(e)}function 
S(){ionic.keyboard.isInitialized||(V()?(window.addEventListener("native.keyboardshow",pe),window.addEventListener("native.keyboardhide",x)):document.body.addEventListener("focusout",x),document.body.addEventListener("ionic.focusin",ue),document.body.addEventListener("focusin",ue),window.navigator.msPointerEnabled?document.removeEventListener("MSPointerDown",S):document.removeEventListener("touchstart",S),ionic.keyboard.isInitialized=!0)}function y(e){clearTimeout(re),(!ionic.keyboard.isOpen||ionic.keyboard.isClosing)&&(ionic.keyboard.isOpening=!0,ionic.keyboard.isClosing=!1),ionic.keyboard.height=e.keyboardHeight,le?C(O,!0):C(I,!0)}function E(e){return clearTimeout(re),e.target&&!e.target.readOnly&&ionic.tap.isKeyboardElement(e.target)&&(ne=ionic.DomUtil.getParentWithClass(e.target,de))?(ee=e.target,ne.classList.contains("overflow-scroll")||(document.body.scrollTop=0,ne.scrollTop=0,ionic.requestAnimationFrame(function(){document.body.scrollTop=0,ne.scrollTop=0}),window.navigator.msPointerEnabled?document.addEventListener("MSPointerMove",L,!1):document.addEventListener("touchmove",L,!1)),(!ionic.keyboard.isOpen||ionic.keyboard.isClosing)&&(ionic.keyboard.isOpening=!0,ionic.keyboard.isClosing=!1),document.addEventListener("keydown",M,!1),void(ionic.keyboard.isOpen||V()?ionic.keyboard.isOpen&&I():C(I,!0))):(ee&&(te=ee),void(ee=null))}function x(){clearTimeout(re),(ionic.keyboard.isOpen||ionic.keyboard.isOpening)&&(ionic.keyboard.isClosing=!0,ionic.keyboard.isOpening=!1),re=setTimeout(function(){ionic.requestAnimationFrame(function(){le?C(function(){O(),P()},!1):C(P,!1)})},50)}function D(){ionic.keyboard.isLandscape=!ionic.keyboard.isLandscape,ionic.Platform.isIOS()&&O(),ionic.Platform.isAndroid()&&(ionic.keyboard.isOpen&&V()?le=!0:C(O,!1))}function M(e){ionic.scroll.isScrolling&&L(e)}function L(e){"TEXTAREA"!==e.target.tagName&&e.preventDefault()}function C(e,t){clearInterval(ie);var n,i=0,r=G(),a=r;return n=ionic.Platform.isAndroid()&&ionic.Platform.version()<4.4?30:ionic.Platform.isAndroid()?10:1,ie=setInterval(function(){a=G(),(!(++i<n)||(N(a)||z(a))&&ionic.keyboard.height)&&(V()||(ionic.keyboard.height=Math.abs(r-window.innerHeight)),ionic.keyboard.isOpen=t,clearInterval(ie),e())},50),n}function P(){clearTimeout(re),ionic.keyboard.isOpen=!1,ionic.keyboard.isClosing=!1,(ee||te)&&ionic.trigger("resetScrollView",{target:ee||te},!0),ionic.requestAnimationFrame(function(){document.body.classList.remove(ce)}),window.navigator.msPointerEnabled?document.removeEventListener("MSPointerMove",L):document.removeEventListener("touchmove",L),document.removeEventListener("keydown",M),ionic.Platform.isAndroid()&&(V()&&cordova.plugins.Keyboard.close(),ee&&ee.blur()),ee=null,te=null}function I(){ionic.keyboard.isOpen=!0,ionic.keyboard.isOpening=!1;var e={keyboardHeight:k(),viewportHeight:ae};if(ee){e.target=ee;var t=ee.getBoundingClientRect();e.elementTop=Math.round(t.top),e.elementBottom=Math.round(t.bottom),e.windowHeight=e.viewportHeight-e.keyboardHeight,e.isElementUnderKeyboard=e.elementBottom>e.windowHeight,ionic.trigger("scrollChildIntoView",e,!0)}return setTimeout(function(){document.body.classList.add(ce)},400),e}function k(){if(ionic.keyboard.height)return ionic.keyboard.height;if(ionic.Platform.isAndroid()){if(ionic.Platform.isFullScreen)return 275;var e=window.innerHeight;return ae>e?ae-e:0}return ionic.Platform.isIOS()?ionic.keyboard.isLandscape?206:ionic.Platform.isWebView()?260:216:275}function N(e){return!!(!ionic.keyboard.isLandscape&&oe&&Math.abs(oe-e)<2)}function 
z(e){return!!(ionic.keyboard.isLandscape&&se&&Math.abs(se-e)<2)}function O(){le=!1,ae=G(),ionic.keyboard.isLandscape&&!se?se=ae:ionic.keyboard.isLandscape||oe||(oe=ae),ee&&ionic.trigger("resetScrollView",{target:ee},!0),ionic.keyboard.isOpen&&ionic.tap.isTextInput(ee)&&I()}function A(){var e=G();e/window.innerWidth<1&&(ionic.keyboard.isLandscape=!0),ae=e,ionic.keyboard.isLandscape&&!se?se=ae:ionic.keyboard.isLandscape||oe||(oe=ae)}function G(){var e=window.innerHeight;return ionic.Platform.isAndroid()&&ionic.Platform.isFullScreen||!ionic.keyboard.isOpen&&!ionic.keyboard.isOpening||ionic.keyboard.isClosing?e:e+k()}function V(){return!!(window.cordova&&cordova.plugins&&cordova.plugins.Keyboard)}function R(){var e;for(e=0;e<document.head.children.length;e++)if("viewport"==document.head.children[e].name){he=document.head.children[e];break}if(he){var t,n=he.content.toLowerCase().replace(/\s+/g,"").split(",");for(e=0;e<n.length;e++)n[e]&&(t=n[e].split("="),fe[t[0]]=t.length>1?t[1]:"_");H()}}function H(){var e=fe.width,t=fe.height,n=ionic.Platform,i=n.version(),r="device-width",a="device-height",o=ionic.viewport.orientation();delete fe.height,fe.width=r,n.isIPad()?i>7?delete fe.width:n.isWebView()?90==o?fe.height="0":7==i&&(fe.height=a):7>i&&(fe.height="0"):n.isIOS()&&(n.isWebView()?i>7?delete fe.width:7>i?t&&(fe.height="0"):7==i&&(fe.height=a):7>i&&t&&(fe.height="0")),(e!==fe.width||t!==fe.height)&&Y()}function Y(){var e,t=[];for(e in fe)fe[e]&&t.push(e+("_"==fe[e]?"":"="+fe[e]));he.content=t.join(", ")}window.ionic=window.ionic||{},window.ionic.views={},window.ionic.version="1.3.1-nightly-4219",function(e){e.DelegateService=function(e){function t(){return!0}if(e.indexOf("$getByHandle")>-1)throw new Error("Method '$getByHandle' is implicitly added to each delegate service. Do not list it as a method.");return["$log",function(n){function i(e,t){this._instances=e,this.handle=t}function r(){this._instances=[]}function a(e){return function(){var t,i=this.handle,r=arguments,a=0;return this._instances.forEach(function(n){if((!i||i==n.$$delegateHandle)&&n.$$filterFn(n)){a++;var o=n[e].apply(n,r);1===a&&(t=o)}}),!a&&i?n.warn('Delegate for handle "'+i+'" could not find a corresponding element with delegate-handle="'+i+'"! '+e+"() was not called!\nPossible cause: If you are calling "+e+'() immediately, and your element with delegate-handle="'+i+'" is a child of your controller, then your element may not be compiled yet. 
Put a $timeout around your call to '+e+"() and try again."):t}}return e.forEach(function(e){i.prototype[e]=a(e)}),r.prototype=i.prototype,r.prototype._registerInstance=function(e,n,i){var r=this._instances;return e.$$delegateHandle=n,e.$$filterFn=i||t,r.push(e),function(){var t=r.indexOf(e);-1!==t&&r.splice(t,1)}},r.prototype.$getByHandle=function(e){return new i(this._instances,e)},new r}]}}(window.ionic),function(e,t,n){function i(){a=!0;for(var e=0;e<r.length;e++)n.requestAnimationFrame(r[e]);r=[],t.removeEventListener("DOMContentLoaded",i)}var r=[],a="complete"===t.readyState||"interactive"===t.readyState;a||t.addEventListener("DOMContentLoaded",i),e._rAF=function(){return e.requestAnimationFrame||e.webkitRequestAnimationFrame||e.mozRequestAnimationFrame||function(t){e.setTimeout(t,16)}}();var o=e.cancelAnimationFrame||e.webkitCancelAnimationFrame||e.mozCancelAnimationFrame||e.webkitCancelRequestAnimationFrame;n.DomUtil={requestAnimationFrame:function(t){return e._rAF(t)},cancelAnimationFrame:function(e){o(e)},animationFrameThrottle:function(e){var t,i,r;return function(){t=arguments,r=this,i||(i=!0,n.requestAnimationFrame(function(){e.apply(r,t),i=!1}))}},contains:function(e,t){for(var n=t;n;){if(n===e)return!0;n=n.parentNode}},getPositionInParent:function(e){return{left:e.offsetLeft,top:e.offsetTop}},getOffsetTop:function(e){var t=0;if(e.offsetParent){do t+=e.offsetTop,e=e.offsetParent;while(e);return t}},ready:function(e){a?n.requestAnimationFrame(e):r.push(e)},getTextBounds:function(n){if(t.createRange){var i=t.createRange();if(i.selectNodeContents(n),i.getBoundingClientRect){var r=i.getBoundingClientRect();if(r){var a=e.scrollX,o=e.scrollY;return{top:r.top+o,left:r.left+a,right:r.left+a+r.width,bottom:r.top+o+r.height,width:r.width,height:r.height}}}}return null},getChildIndex:function(e,t){if(t)for(var n,i=e.parentNode.children,r=0,a=0,o=i.length;o>r;r++)if(n=i[r],n.nodeName&&n.nodeName.toLowerCase()==t){if(n==e)return a;a++}return Array.prototype.slice.call(e.parentNode.children).indexOf(e)},swapNodes:function(e,t){t.parentNode.insertBefore(e,t)},elementIsDescendant:function(e,t,n){var i=e;do{if(i===t)return!0;i=i.parentNode}while(i&&i!==n);return!1},getParentWithClass:function(e,t,n){for(n=n||10;e.parentNode&&n--;){if(e.parentNode.classList&&e.parentNode.classList.contains(t))return e.parentNode;e=e.parentNode}return null},getParentOrSelfWithClass:function(e,t,n){for(n=n||10;e&&n--;){if(e.classList&&e.classList.contains(t))return e;e=e.parentNode}return null},rectContains:function(e,t,n,i,r,a){return n>e||e>r?!1:i>t||t>a?!1:!0},blurAll:function(){return t.activeElement&&t.activeElement!=t.body?(t.activeElement.blur(),t.activeElement):null},cachedAttr:function(e,t,n){if(e=e&&e.length&&e[0]||e,e&&e.setAttribute){var i="$attr-"+t;return arguments.length>2?e[i]!==n&&(e.setAttribute(t,n),e[i]=n):"undefined"==typeof e[i]&&(e[i]=e.getAttribute(t)),e[i]}},cachedStyles:function(e,t){if(e=e&&e.length&&e[0]||e,e&&e.style)for(var n in t)e["$style-"+n]!==t[n]&&(e.style[n]=e["$style-"+n]=t[n])}},n.requestAnimationFrame=n.DomUtil.requestAnimationFrame,n.cancelAnimationFrame=n.DomUtil.cancelAnimationFrame,n.animationFrameThrottle=n.DomUtil.animationFrameThrottle}(window,document,ionic),function(e){e.CustomEvent=function(){if("function"==typeof window.CustomEvent)return CustomEvent;var e=function(e,t){var n;t=t||{bubbles:!1,cancelable:!1,detail:void 0};try{n=document.createEvent("CustomEvent"),n.initCustomEvent(e,t.bubbles,t.cancelable,t.detail)}catch(i){n=document.createEvent("Event");for(var 
r in t)n[r]=t[r];n.initEvent(e,t.bubbles,t.cancelable)}return n};return e.prototype=window.Event.prototype,e}(),e.EventController={VIRTUALIZED_EVENTS:["tap","swipe","swiperight","swipeleft","drag","hold","release"],trigger:function(t,n,i,r){var a=new e.CustomEvent(t,{detail:n,bubbles:!!i,cancelable:!!r});n&&n.target&&n.target.dispatchEvent&&n.target.dispatchEvent(a)||window.dispatchEvent(a)},on:function(t,n,i){for(var r=i||window,a=0,o=this.VIRTUALIZED_EVENTS.length;o>a;a++)if(t==this.VIRTUALIZED_EVENTS[a]){var s=new e.Gesture(i);return s.on(t,n),s}r.addEventListener(t,n)},off:function(e,t,n){n.removeEventListener(e,t)},onGesture:function(t,n,i,r){var a=new e.Gesture(i,r);return a.on(t,n),a},offGesture:function(e,t,n){e&&e.off(t,n)},handlePopState:function(){}},e.on=function(){e.EventController.on.apply(e.EventController,arguments)},e.off=function(){e.EventController.off.apply(e.EventController,arguments)},e.trigger=e.EventController.trigger,e.onGesture=function(){return e.EventController.onGesture.apply(e.EventController.onGesture,arguments)},e.offGesture=function(){return e.EventController.offGesture.apply(e.EventController.offGesture,arguments)}}(window.ionic),function(e){function t(){if(!e.Gestures.READY){e.Gestures.event.determineEventTypes();for(var t in e.Gestures.gestures)e.Gestures.gestures.hasOwnProperty(t)&&e.Gestures.detection.register(e.Gestures.gestures[t]);e.Gestures.event.onTouch(e.Gestures.DOCUMENT,e.Gestures.EVENT_MOVE,e.Gestures.detection.detect),e.Gestures.event.onTouch(e.Gestures.DOCUMENT,e.Gestures.EVENT_END,e.Gestures.detection.detect),e.Gestures.READY=!0}}e.Gesture=function(t,n){return new e.Gestures.Instance(t,n||{})},e.Gestures={},e.Gestures.defaults={stop_browser_behavior:"disable-user-behavior"},e.Gestures.HAS_POINTEREVENTS=window.navigator.pointerEnabled||window.navigator.msPointerEnabled,e.Gestures.HAS_TOUCHEVENTS="ontouchstart"in window,e.Gestures.MOBILE_REGEX=/mobile|tablet|ip(ad|hone|od)|android|silk/i,e.Gestures.NO_MOUSEEVENTS=e.Gestures.HAS_TOUCHEVENTS&&window.navigator.userAgent.match(e.Gestures.MOBILE_REGEX),e.Gestures.EVENT_TYPES={},e.Gestures.DIRECTION_DOWN="down",e.Gestures.DIRECTION_LEFT="left",e.Gestures.DIRECTION_UP="up",e.Gestures.DIRECTION_RIGHT="right",e.Gestures.POINTER_MOUSE="mouse",e.Gestures.POINTER_TOUCH="touch",e.Gestures.POINTER_PEN="pen",e.Gestures.EVENT_START="start",e.Gestures.EVENT_MOVE="move",e.Gestures.EVENT_END="end",e.Gestures.DOCUMENT=window.document,e.Gestures.plugins={},e.Gestures.READY=!1,e.Gestures.Instance=function(n,i){var r=this;return null===n?this:(t(),this.element=n,this.enabled=!0,this.options=e.Gestures.utils.extend(e.Gestures.utils.extend({},e.Gestures.defaults),i||{}),this.options.stop_browser_behavior&&e.Gestures.utils.stopDefaultBrowserBehavior(this.element,this.options.stop_browser_behavior),e.Gestures.event.onTouch(n,e.Gestures.EVENT_START,function(t){r.enabled&&e.Gestures.detection.startDetect(r,t)}),this)},e.Gestures.Instance.prototype={on:function(e,t){for(var n=e.split(" "),i=0;i<n.length;i++)this.element.addEventListener(n[i],t,!1);return this},off:function(e,t){for(var n=e.split(" "),i=0;i<n.length;i++)this.element.removeEventListener(n[i],t,!1);return this},trigger:function(t,n){var i=e.Gestures.DOCUMENT.createEvent("Event");i.initEvent(t,!0,!0),i.gesture=n;var r=this.element;return e.Gestures.utils.hasParent(n.target,r)&&(r=n.target),r.dispatchEvent(i),this},enable:function(e){return this.enabled=e,this}};var n=null,i=!1,r=!1;e.Gestures.event={bindDom:function(e,t,n){for(var i=t.split(" 
"),r=0;r<i.length;r++)e.addEventListener(i[r],n,!1)},onTouch:function(t,a,o){var s=this;this.bindDom(t,e.Gestures.EVENT_TYPES[a],function(l){var c=l.type.toLowerCase();if(!c.match(/mouse/)||!r){c.match(/touch/)||c.match(/pointerdown/)||c.match(/mouse/)&&1===l.which?i=!0:c.match(/mouse/)&&1!==l.which&&(i=!1),c.match(/touch|pointer/)&&(r=!0);var d=0;i&&(e.Gestures.HAS_POINTEREVENTS&&a!=e.Gestures.EVENT_END?d=e.Gestures.PointerEvent.updatePointer(a,l):c.match(/touch/)?d=l.touches.length:r||(d=c.match(/up/)?0:1),d>0&&a==e.Gestures.EVENT_END?a=e.Gestures.EVENT_MOVE:d||(a=e.Gestures.EVENT_END),(d||null===n)&&(n=l),o.call(e.Gestures.detection,s.collectEventData(t,a,s.getTouchList(n,a),l)),e.Gestures.HAS_POINTEREVENTS&&a==e.Gestures.EVENT_END&&(d=e.Gestures.PointerEvent.updatePointer(a,l))),d||(n=null,i=!1,r=!1,e.Gestures.PointerEvent.reset())}})},determineEventTypes:function(){var t;t=e.Gestures.HAS_POINTEREVENTS?e.Gestures.PointerEvent.getEvents():e.Gestures.NO_MOUSEEVENTS?["touchstart","touchmove","touchend touchcancel"]:["touchstart mousedown","touchmove mousemove","touchend touchcancel mouseup"],e.Gestures.EVENT_TYPES[e.Gestures.EVENT_START]=t[0],e.Gestures.EVENT_TYPES[e.Gestures.EVENT_MOVE]=t[1],e.Gestures.EVENT_TYPES[e.Gestures.EVENT_END]=t[2]},getTouchList:function(t){return e.Gestures.HAS_POINTEREVENTS?e.Gestures.PointerEvent.getTouchList():t.touches?t.touches:(t.identifier=1,[t])},collectEventData:function(t,n,i,r){var a=e.Gestures.POINTER_TOUCH;return(r.type.match(/mouse/)||e.Gestures.PointerEvent.matchType(e.Gestures.POINTER_MOUSE,r))&&(a=e.Gestures.POINTER_MOUSE),{center:e.Gestures.utils.getCenter(i),timeStamp:(new Date).getTime(),target:r.target,touches:i,eventType:n,pointerType:a,srcEvent:r,preventDefault:function(){this.srcEvent.preventManipulation&&this.srcEvent.preventManipulation(),this.srcEvent.preventDefault},stopPropagation:function(){this.srcEvent.stopPropagation()},stopDetect:function(){return e.Gestures.detection.stopDetect()}}}},e.Gestures.PointerEvent={pointers:{},getTouchList:function(){var e=this,t=[];return Object.keys(e.pointers).sort().forEach(function(n){t.push(e.pointers[n])}),t},updatePointer:function(t,n){return t==e.Gestures.EVENT_END?this.pointers={}:(n.identifier=n.pointerId,this.pointers[n.pointerId]=n),Object.keys(this.pointers).length},matchType:function(t,n){if(!n.pointerType)return!1;var i={};return i[e.Gestures.POINTER_MOUSE]=n.pointerType==n.MSPOINTER_TYPE_MOUSE||n.pointerType==e.Gestures.POINTER_MOUSE,i[e.Gestures.POINTER_TOUCH]=n.pointerType==n.MSPOINTER_TYPE_TOUCH||n.pointerType==e.Gestures.POINTER_TOUCH,i[e.Gestures.POINTER_PEN]=n.pointerType==n.MSPOINTER_TYPE_PEN||n.pointerType==e.Gestures.POINTER_PEN,i[t]},getEvents:function(){return["pointerdown MSPointerDown","pointermove MSPointerMove","pointerup pointercancel MSPointerUp MSPointerCancel"]},reset:function(){this.pointers={}}},e.Gestures.utils={extend:function(e,t,n){for(var i in t)void 0!==e[i]&&n||(e[i]=t[i]);return e},hasParent:function(e,t){for(;e;){if(e==t)return!0;e=e.parentNode}return!1},getCenter:function(e){for(var t=[],n=[],i=0,r=e.length;r>i;i++)t.push(e[i].pageX),n.push(e[i].pageY);return{pageX:(Math.min.apply(Math,t)+Math.max.apply(Math,t))/2,pageY:(Math.min.apply(Math,n)+Math.max.apply(Math,n))/2}},getVelocity:function(e,t,n){return{x:Math.abs(t/e)||0,y:Math.abs(n/e)||0}},getAngle:function(e,t){var n=t.pageY-e.pageY,i=t.pageX-e.pageX;return 180*Math.atan2(n,i)/Math.PI},getDirection:function(t,n){var i=Math.abs(t.pageX-n.pageX),r=Math.abs(t.pageY-n.pageY);return 
i>=r?t.pageX-n.pageX>0?e.Gestures.DIRECTION_LEFT:e.Gestures.DIRECTION_RIGHT:t.pageY-n.pageY>0?e.Gestures.DIRECTION_UP:e.Gestures.DIRECTION_DOWN},getDistance:function(e,t){var n=t.pageX-e.pageX,i=t.pageY-e.pageY;return Math.sqrt(n*n+i*i)},getScale:function(e,t){return e.length>=2&&t.length>=2?this.getDistance(t[0],t[1])/this.getDistance(e[0],e[1]):1},getRotation:function(e,t){return e.length>=2&&t.length>=2?this.getAngle(t[1],t[0])-this.getAngle(e[1],e[0]):0},isVertical:function(t){return t==e.Gestures.DIRECTION_UP||t==e.Gestures.DIRECTION_DOWN},stopDefaultBrowserBehavior:function(e,t){e&&e.classList&&(e.classList.add(t),e.onselectstart=function(){return!1})}},e.Gestures.detection={gestures:[],current:null,previous:null,stopped:!1,startDetect:function(t,n){this.current||(this.stopped=!1,this.current={inst:t,startEvent:e.Gestures.utils.extend({},n),lastEvent:!1,name:""},this.detect(n))},detect:function(t){if(!this.current||this.stopped)return null;t=this.extendEventData(t);for(var n=this.current.inst.options,i=0,r=this.gestures.length;r>i;i++){var a=this.gestures[i];if(!this.stopped&&n[a.name]!==!1&&a.handler.call(a,t,this.current.inst)===!1){this.stopDetect();break}}return this.current&&(this.current.lastEvent=t),t.eventType==e.Gestures.EVENT_END&&!t.touches.length-1&&this.stopDetect(),t},stopDetect:function(){this.previous=e.Gestures.utils.extend({},this.current),this.current=null,this.stopped=!0},extendEventData:function(t){var n=this.current.startEvent;if(n&&(t.touches.length!=n.touches.length||t.touches===n.touches)){n.touches=[];for(var i=0,r=t.touches.length;r>i;i++)n.touches.push(e.Gestures.utils.extend({},t.touches[i]))}var a=t.timeStamp-n.timeStamp,o=t.center.pageX-n.center.pageX,s=t.center.pageY-n.center.pageY,l=e.Gestures.utils.getVelocity(a,o,s);return e.Gestures.utils.extend(t,{deltaTime:a,deltaX:o,deltaY:s,velocityX:l.x,velocityY:l.y,distance:e.Gestures.utils.getDistance(n.center,t.center),angle:e.Gestures.utils.getAngle(n.center,t.center),direction:e.Gestures.utils.getDirection(n.center,t.center),scale:e.Gestures.utils.getScale(n.touches,t.touches),rotation:e.Gestures.utils.getRotation(n.touches,t.touches),startEvent:n}),t},register:function(t){var n=t.defaults||{};return void 0===n[t.name]&&(n[t.name]=!0),e.Gestures.utils.extend(e.Gestures.defaults,n,!0),t.index=t.index||1e3,this.gestures.push(t),this.gestures.sort(function(e,t){return e.index<t.index?-1:e.index>t.index?1:0}),this.gestures}},e.Gestures.gestures=e.Gestures.gestures||{},e.Gestures.gestures.Hold={name:"hold",index:10,defaults:{hold_timeout:500,hold_threshold:9},timer:null,handler:function(t,n){switch(t.eventType){case e.Gestures.EVENT_START:clearTimeout(this.timer),e.Gestures.detection.current.name=this.name,this.timer=setTimeout(function(){"hold"==e.Gestures.detection.current.name&&(e.tap.cancelClick(),n.trigger("hold",t))},n.options.hold_timeout);break;case e.Gestures.EVENT_MOVE:t.distance>n.options.hold_threshold&&clearTimeout(this.timer);break;case e.Gestures.EVENT_END:clearTimeout(this.timer)}}},e.Gestures.gestures.Tap={name:"tap",index:100,defaults:{tap_max_touchtime:250,tap_max_distance:10,tap_always:!0,doubletap_distance:20,doubletap_interval:300},handler:function(t,n){if(t.eventType==e.Gestures.EVENT_END&&"touchcancel"!=t.srcEvent.type){var 
i=e.Gestures.detection.previous,r=!1;if(t.deltaTime>n.options.tap_max_touchtime||t.distance>n.options.tap_max_distance)return;i&&"tap"==i.name&&t.timeStamp-i.lastEvent.timeStamp<n.options.doubletap_interval&&t.distance<n.options.doubletap_distance&&(n.trigger("doubletap",t),r=!0),(!r||n.options.tap_always)&&(e.Gestures.detection.current.name="tap",n.trigger("tap",t))}}},e.Gestures.gestures.Swipe={name:"swipe",index:40,defaults:{swipe_max_touches:1,swipe_velocity:.4},handler:function(t,n){if(t.eventType==e.Gestures.EVENT_END){if(n.options.swipe_max_touches>0&&t.touches.length>n.options.swipe_max_touches)return;(t.velocityX>n.options.swipe_velocity||t.velocityY>n.options.swipe_velocity)&&(n.trigger(this.name,t),n.trigger(this.name+t.direction,t))}}},e.Gestures.gestures.Drag={name:"drag",index:50,defaults:{drag_min_distance:10,correct_for_drag_min_distance:!0,drag_max_touches:1,drag_block_horizontal:!0,drag_block_vertical:!0,drag_lock_to_axis:!1,drag_lock_min_distance:25,prevent_default_directions:[]},triggered:!1,handler:function(t,n){if("touchstart"==t.srcEvent.type||"touchend"==t.srcEvent.type?this.preventedFirstMove=!1:this.preventedFirstMove||"touchmove"!=t.srcEvent.type||(n.options.prevent_default_directions.length>0&&-1!=n.options.prevent_default_directions.indexOf(t.direction)&&t.srcEvent.preventDefault(),this.preventedFirstMove=!0),e.Gestures.detection.current.name!=this.name&&this.triggered)return n.trigger(this.name+"end",t),void(this.triggered=!1);if(!(n.options.drag_max_touches>0&&t.touches.length>n.options.drag_max_touches))switch(t.eventType){case e.Gestures.EVENT_START:this.triggered=!1;break;case e.Gestures.EVENT_MOVE:if(t.distance<n.options.drag_min_distance&&e.Gestures.detection.current.name!=this.name)return;if(e.Gestures.detection.current.name!=this.name&&(e.Gestures.detection.current.name=this.name,n.options.correct_for_drag_min_distance)){var i=Math.abs(n.options.drag_min_distance/t.distance);e.Gestures.detection.current.startEvent.center.pageX+=t.deltaX*i,e.Gestures.detection.current.startEvent.center.pageY+=t.deltaY*i,t=e.Gestures.detection.extendEventData(t)}(e.Gestures.detection.current.lastEvent.drag_locked_to_axis||n.options.drag_lock_to_axis&&n.options.drag_lock_min_distance<=t.distance)&&(t.drag_locked_to_axis=!0);var r=e.Gestures.detection.current.lastEvent.direction;t.drag_locked_to_axis&&r!==t.direction&&(e.Gestures.utils.isVertical(r)?t.direction=t.deltaY<0?e.Gestures.DIRECTION_UP:e.Gestures.DIRECTION_DOWN:t.direction=t.deltaX<0?e.Gestures.DIRECTION_LEFT:e.Gestures.DIRECTION_RIGHT),this.triggered||(n.trigger(this.name+"start",t),this.triggered=!0),n.trigger(this.name,t),n.trigger(this.name+t.direction,t),(n.options.drag_block_vertical&&e.Gestures.utils.isVertical(t.direction)||n.options.drag_block_horizontal&&!e.Gestures.utils.isVertical(t.direction))&&t.preventDefault();break;case e.Gestures.EVENT_END:this.triggered&&n.trigger(this.name+"end",t),this.triggered=!1}}},e.Gestures.gestures.Transform={name:"transform",index:45,defaults:{transform_min_scale:.01,transform_min_rotation:1,transform_always_block:!1},triggered:!1,handler:function(t,n){if(e.Gestures.detection.current.name!=this.name&&this.triggered)return n.trigger(this.name+"end",t),void(this.triggered=!1);if(!(t.touches.length<2))switch(n.options.transform_always_block&&t.preventDefault(),t.eventType){case e.Gestures.EVENT_START:this.triggered=!1;break;case e.Gestures.EVENT_MOVE:var 
i=Math.abs(1-t.scale),r=Math.abs(t.rotation);if(i<n.options.transform_min_scale&&r<n.options.transform_min_rotation)return;e.Gestures.detection.current.name=this.name,this.triggered||(n.trigger(this.name+"start",t),this.triggered=!0),n.trigger(this.name,t),r>n.options.transform_min_rotation&&n.trigger("rotate",t),i>n.options.transform_min_scale&&(n.trigger("pinch",t),n.trigger("pinch"+(t.scale<1?"in":"out"),t));break;case e.Gestures.EVENT_END:this.triggered&&n.trigger(this.name+"end",t),this.triggered=!1}}},e.Gestures.gestures.Touch={name:"touch",index:-(1/0),defaults:{prevent_default:!1,prevent_mouseevents:!1},handler:function(t,n){return n.options.prevent_mouseevents&&t.pointerType==e.Gestures.POINTER_MOUSE?void t.stopDetect():(n.options.prevent_default&&t.preventDefault(),void(t.eventType==e.Gestures.EVENT_START&&n.trigger(this.name,t)))}},e.Gestures.gestures.Release={name:"release",index:1/0,handler:function(t,n){t.eventType==e.Gestures.EVENT_END&&n.trigger(this.name,t)}}}(window.ionic),function(e,t,n){function i(e){e=e.replace(/[\[]/,"\\[").replace(/[\]]/,"\\]");var t=new RegExp("[\\?&]"+e+"=([^&#]*)"),n=t.exec(location.search);return null===n?"":decodeURIComponent(n[1].replace(/\+/g," "))}function r(){setTimeout(function(){!f.isReady&&f.isWebView()},_)}function a(){f.isWebView()?t.addEventListener("deviceready",o,!1):o(),s&&e.removeEventListener("load",a,!1)}function o(){f.isReady=!0,f.detect();for(var e=0;e<v.length;e++)v[e]();v=[],n.trigger("platformready",{target:t}),h(function(){t.body.classList.add("platform-ready")})}var s,l="ios",c="android",d="windowsphone",u="edge",p="crosswalk",h=n.requestAnimationFrame,f=n.Platform={navigator:e.navigator,isReady:!1,isFullScreen:!1,platforms:null,grade:null,ua:navigator.userAgent,ready:function(e){f.isReady?e():v.push(e)},detect:function(){f._checkPlatforms(),h(function(){for(var e=0;e<f.platforms.length;e++)t.body.classList.add("platform-"+f.platforms[e])})},setGrade:function(e){var n=f.grade;f.grade=e,h(function(){n&&t.body.classList.remove("grade-"+n),t.body.classList.add("grade-"+e)})},device:function(){return e.device||{}},_checkPlatforms:function(){f.platforms=[];var t="a";f.isWebView()?(f.platforms.push("webview"),e.cordova||e.PhoneGap||e.phonegap?f.platforms.push("cordova"):"object"==typeof e.forge&&f.platforms.push("trigger")):f.platforms.push("browser"),f.isIPad()&&f.platforms.push("ipad");var n=f.platform();if(n){f.platforms.push(n);var i=f.version();if(i){var r=i.toString();r.indexOf(".")>0?r=r.replace(".","_"):r+="_0",f.platforms.push(n+r.split("_")[0]),f.platforms.push(n+r),f.isAndroid()&&4.4>i?t=4>i?"c":"b":f.isWindowsPhone()&&(t="b")}}f.setGrade(t)},isWebView:function(){return!(!e.cordova&&!e.PhoneGap&&!e.phonegap&&"object"!==e.forge)},isIPad:function(){return/iPad/i.test(f.navigator.platform)?!0:/iPad/i.test(f.ua)},isIOS:function(){return f.is(l)},isAndroid:function(){return f.is(c)},isWindowsPhone:function(){return f.is(d)},isEdge:function(){return f.is(u)},isCrosswalk:function(){return f.is(p)},platform:function(){return null===m&&f.setPlatform(f.device().platform),m},setPlatform:function(e){m="undefined"!=typeof e&&null!==e&&e.length?e.toLowerCase():i("ionicplatform")?i("ionicplatform"):f.ua.indexOf("Edge")>-1?u:f.ua.indexOf("Windows Phone")>-1?d:f.ua.indexOf("Android")>0?c:/iPhone|iPad|iPod/.test(f.ua)?l:f.navigator.platform&&navigator.platform.toLowerCase().split(" ")[0]||""},version:function(){return null===g&&f.setVersion(f.device().version),g},setVersion:function(e){if("undefined"!=typeof 
e&&null!==e&&(e=e.split("."),e=parseFloat(e[0]+"."+(e.length>1?e[1]:0)),!isNaN(e)))return void(g=e);g=0;var t=f.platform(),n={android:/Android (\d+).(\d+)?/,ios:/OS (\d+)_(\d+)?/,windowsphone:/Windows Phone (\d+).(\d+)?/};n[t]&&(e=f.ua.match(n[t]),e&&e.length>2&&(g=parseFloat(e[1]+"."+e[2])))},is:function(e){if(e=e.toLowerCase(),f.platforms)for(var t=0;t<f.platforms.length;t++)if(f.platforms[t]===e)return!0;var n=f.platform();return n?n===e.toLowerCase():f.ua.toLowerCase().indexOf(e)>=0},exitApp:function(){f.ready(function(){navigator.app&&navigator.app.exitApp&&navigator.app.exitApp()})},showStatusBar:function(n){f._showStatusBar=n,f.ready(function(){h(function(){f._showStatusBar?(e.StatusBar&&e.StatusBar.show(),t.body.classList.remove("status-bar-hide")):(e.StatusBar&&e.StatusBar.hide(),t.body.classList.add("status-bar-hide"))})})},fullScreen:function(e,i){f.isFullScreen=e!==!1,n.DomUtil.ready(function(){h(function(){f.isFullScreen?t.body.classList.add("fullscreen"):t.body.classList.remove("fullscreen")}),f.showStatusBar(i===!0)})}},m=null,g=null,v=[],_=2e3;r(),"complete"===t.readyState?a():(s=!0,e.addEventListener("load",a,!1))}(window,document,ionic),function(e,t){"use strict";t.CSS={},t.CSS.TRANSITION=[],t.CSS.TRANSFORM=[],t.EVENTS={},function(){var n,i=["webkitTransform","transform","-webkit-transform","webkit-transform","-moz-transform","moz-transform","MozTransform","mozTransform","msTransform"];for(n=0;n<i.length;n++)if(void 0!==e.documentElement.style[i[n]]){t.CSS.TRANSFORM=i[n];break}for(i=["webkitTransition","mozTransition","msTransition","transition"],n=0;n<i.length;n++)if(void 0!==e.documentElement.style[i[n]]){t.CSS.TRANSITION=i[n];break}t.CSS.TRANSITION=t.CSS.TRANSITION||"transition";var r=t.CSS.TRANSITION.indexOf("webkit")>-1;t.CSS.TRANSITION_DURATION=(r?"-webkit-":"")+"transition-duration",t.CSS.TRANSITIONEND=(r?"webkitTransitionEnd ":"")+"transitionend"}(),function(){var e="touchstart",n="touchmove",i="touchend",r="touchcancel"; window.navigator.pointerEnabled?(e="pointerdown",n="pointermove",i="pointerup",r="pointercancel"):window.navigator.msPointerEnabled&&(e="MSPointerDown",n="MSPointerMove",i="MSPointerUp",r="MSPointerCancel"),t.EVENTS.touchstart=e,t.EVENTS.touchmove=n,t.EVENTS.touchend=i,t.EVENTS.touchcancel=r}(),"classList"in e.documentElement||!Object.defineProperty||"undefined"==typeof HTMLElement||Object.defineProperty(HTMLElement.prototype,"classList",{get:function(){function e(e){return function(){var n,i=t.className.split(/\s+/);for(n=0;n<arguments.length;n++)e(i,i.indexOf(arguments[n]),arguments[n]);t.className=i.join(" ")}}var t=this;return{add:e(function(e,t,n){~t||e.push(n)}),remove:e(function(e,t){~t&&e.splice(t,1)}),toggle:e(function(e,t,n){~t?e.splice(t,1):e.push(n)}),contains:function(e){return!!~t.className.split(/\s+/).indexOf(e)},item:function(e){return t.className.split(/\s+/)[e]||null}}}})}(document,ionic);var X,F,B,W,$,q,U,K,j="touchmove",Z=12,Q=50,J={click:i,mousedown:r,mouseup:a,mousemove:o,touchstart:s,touchend:l,touchcancel:d,touchmove:c,pointerdown:s,pointerup:l,pointercancel:d,pointermove:c,MSPointerDown:s,MSPointerUp:l,MSPointerCancel:d,MSPointerMove:c,focusin:m,focusout:g};ionic.tap={register:function(t){return 
X=t,e("click",!0,!0),e("mouseup"),e("mousedown"),window.navigator.pointerEnabled?(e("pointerdown"),e("pointerup"),e("pointercancel"),j="pointermove"):window.navigator.msPointerEnabled?(e("MSPointerDown"),e("MSPointerUp"),e("MSPointerCancel"),j="MSPointerMove"):(e("touchstart"),e("touchend"),e("touchcancel")),e("focusin"),e("focusout"),function(){for(var t in J)e(t,!1);X=null,F=null,B=!1,$=!1,q=null}},ignoreScrollStart:function(e){return e.defaultPrevented||/^(file|range)$/i.test(e.target.type)||"true"==(e.target.dataset?e.target.dataset.preventScroll:e.target.getAttribute("data-prevent-scroll"))||!!/^(object|embed)$/i.test(e.target.tagName)||ionic.tap.isElementTapDisabled(e.target)},isTextInput:function(e){return!!e&&("TEXTAREA"==e.tagName||"true"===e.contentEditable||"INPUT"==e.tagName&&!/^(radio|checkbox|range|file|submit|reset|color|image|button)$/i.test(e.type))},isDateInput:function(e){return!!e&&"INPUT"==e.tagName&&/^(date|time|datetime-local|month|week)$/i.test(e.type)},isVideo:function(e){return!!e&&"VIDEO"==e.tagName},isKeyboardElement:function(e){return!ionic.Platform.isIOS()||ionic.Platform.isIPad()?ionic.tap.isTextInput(e)&&!ionic.tap.isDateInput(e):ionic.tap.isTextInput(e)||!!e&&"SELECT"==e.tagName},isLabelWithTextInput:function(e){var t=w(e,!1);return!!t&&ionic.tap.isTextInput(T(t))},containsOrIsTextInput:function(e){return ionic.tap.isTextInput(e)||ionic.tap.isLabelWithTextInput(e)},cloneFocusedInput:function(e){ionic.tap.hasCheckedClone||(ionic.tap.hasCheckedClone=!0,ionic.requestAnimationFrame(function(){var t=e.querySelector(":focus");if(ionic.tap.isTextInput(t)&&!ionic.tap.isDateInput(t)){var n=t.cloneNode(!0);n.value=t.value,n.classList.add("cloned-text-input"),n.readOnly=!0,t.isContentEditable&&(n.contentEditable=t.contentEditable,n.innerHTML=t.innerHTML),t.parentElement.insertBefore(n,t),t.classList.add("previous-input-focus"),n.scrollTop=t.scrollTop}}))},hasCheckedClone:!1,removeClonedInputs:function(e){ionic.tap.hasCheckedClone=!1,ionic.requestAnimationFrame(function(){var t,n=e.querySelectorAll(".cloned-text-input"),i=e.querySelectorAll(".previous-input-focus");for(t=0;t<n.length;t++)n[t].parentElement.removeChild(n[t]);for(t=0;t<i.length;t++)i[t].classList.remove("previous-input-focus"),i[t].style.top="",ionic.keyboard.isOpen&&!ionic.keyboard.isClosing&&i[t].focus()})},requiresNativeClick:function(e){return ionic.Platform.isWindowsPhone()&&("A"==e.tagName||"BUTTON"==e.tagName||e.hasAttribute("ng-click")||"INPUT"==e.tagName&&("button"==e.type||"submit"==e.type))?!0:!e||e.disabled||/^(file|range)$/i.test(e.type)||/^(object|video)$/i.test(e.tagName)||ionic.tap.isLabelContainingFileInput(e)?!0:ionic.tap.isElementTapDisabled(e)},isLabelContainingFileInput:function(e){var t=w(e);if("LABEL"!==t.tagName)return!1;var n=t.querySelector("input[type=file]");return n&&n.disabled===!1?!0:!1},isElementTapDisabled:function(e){if(e&&1===e.nodeType)for(var t=e;t;){if(t.getAttribute&&"true"==t.getAttribute("data-tap-disabled"))return!0;t=t.parentElement}return!1},setTolerance:function(e,t){Z=e,Q=t},cancelClick:function(){$=!0},pointerCoord:function(e){var t={x:0,y:0};if(e){var n=e.touches&&e.touches.length?e.touches:[e],i=e.changedTouches&&e.changedTouches[0]||n[0];i&&(t.x=i.clientX||i.pageX||0,t.y=i.clientY||i.pageY||0)}return t}},ionic.DomUtil.ready(function(){var e="undefined"!=typeof angular?angular:null;(!e||e&&!e.scenario)&&ionic.tap.register(document)}),function(e,t){"use strict";function n(){a={},t.requestAnimationFrame(r)}function i(){for(var e in 
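/* Tap activator: queues the nearest tappable ancestor (.item, A, BUTTON, [ng-click], .button) on touch start and toggles the "activated" class on animation frames, skipping while scrolling; ionic.Utils (debounce, throttle, inherit, extend, scope disconnect/reconnect) follows. */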
a)a[e]&&(a[e].classList.add(l),o[e]=a[e]);a={}}function r(){if(t.transition&&t.transition.isActive)return void setTimeout(r,400);for(var e in o)o[e]&&(o[e].classList.remove(l),delete o[e])}var a={},o={},s=0,l="activated";t.activator={start:function(e){var n=t.tap.pointerCoord(e).x;n>0&&30>n||t.requestAnimationFrame(function(){if(!(t.scroll&&t.scroll.isScrolling||t.tap.requiresNativeClick(e.target))){for(var n,r=e.target,o=0;6>o&&(r&&1===r.nodeType);o++){if(n&&r.classList&&r.classList.contains("item")){n=r;break}if("A"==r.tagName||"BUTTON"==r.tagName||r.hasAttribute("ng-click")){n=r;break}if(r.classList&&r.classList.contains("button")){n=r;break}if("ION-CONTENT"==r.tagName||r.classList&&r.classList.contains("pane")||"BODY"==r.tagName)break;r=r.parentElement}n&&(a[s]=n,t.requestAnimationFrame(i),s=s>29?0:s+1)}})},end:function(){setTimeout(n,200)}}}(document,ionic),function(e){var t=0;e.Utils={arrayMove:function(e,t,n){if(n>=e.length)for(var i=n-e.length;i--+1;)e.push(void 0);return e.splice(n,0,e.splice(t,1)[0]),e},proxy:function(e,t){var n=Array.prototype.slice.call(arguments,2);return function(){return e.apply(t,n.concat(Array.prototype.slice.call(arguments)))}},debounce:function(e,t,n){var i,r,a,o,s;return function(){a=this,r=arguments,o=new Date;var l=function(){var c=new Date-o;t>c?i=setTimeout(l,t-c):(i=null,n||(s=e.apply(a,r)))},c=n&&!i;return i||(i=setTimeout(l,t)),c&&(s=e.apply(a,r)),s}},throttle:function(e,t,n){var i,r,a,o=null,s=0;n||(n={});var l=function(){s=n.leading===!1?0:Date.now(),o=null,a=e.apply(i,r)};return function(){var c=Date.now();s||n.leading!==!1||(s=c);var d=t-(c-s);return i=this,r=arguments,0>=d?(clearTimeout(o),o=null,s=c,a=e.apply(i,r)):o||n.trailing===!1||(o=setTimeout(l,d)),a}},inherit:function(t,n){var i,r=this;i=t&&t.hasOwnProperty("constructor")?t.constructor:function(){return r.apply(this,arguments)},e.extend(i,r,n);var a=function(){this.constructor=i};return a.prototype=r.prototype,i.prototype=new a,t&&e.extend(i.prototype,t),i.__super__=r.prototype,i},extend:function(e){for(var t=Array.prototype.slice.call(arguments,1),n=0;n<t.length;n++){var i=t[n];if(i)for(var r in i)e[r]=i[r]}return e},nextUid:function(){return"ion"+t++},disconnectScope:function(e){if(e&&e.$root!==e){var t=e.$parent;e.$$disconnected=!0,e.$broadcast("$ionic.disconnectScope",e),t.$$childHead===e&&(t.$$childHead=e.$$nextSibling),t.$$childTail===e&&(t.$$childTail=e.$$prevSibling),e.$$prevSibling&&(e.$$prevSibling.$$nextSibling=e.$$nextSibling),e.$$nextSibling&&(e.$$nextSibling.$$prevSibling=e.$$prevSibling),e.$$nextSibling=e.$$prevSibling=null}},reconnectScope:function(e){if(e&&e.$root!==e&&e.$$disconnected){var t=e.$parent;e.$$disconnected=!1,e.$broadcast("$ionic.reconnectScope",e),e.$$prevSibling=t.$$childTail,t.$$childHead?(t.$$childTail.$$nextSibling=e,t.$$childTail=e):t.$$childHead=t.$$childTail=e}},isScopeDisconnected:function(e){for(var t=e;t;){if(t.$$disconnected)return!0;t=t.$parent}return!1}},e.inherit=e.Utils.inherit,e.extend=e.Utils.extend,e.throttle=e.Utils.throttle,e.proxy=e.Utils.proxy,e.debounce=e.Utils.debounce}(window.ionic);var 
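/* ionic.keyboard: tracks soft-keyboard state via the cordova Keyboard plugin's native.keyboardshow/hide events, with focusin/focusout as the web fallback; exposes show()/hide()/enable()/disable() and re-measures on orientation change. */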
ee,te,ne,ie,re,ae=0,oe=0,se=0,le=!1,ce="keyboard-open",de="scroll-content",ue=ionic.debounce(E,200,!0),pe=ionic.debounce(y,100,!0);ionic.keyboard={isOpen:!1,isClosing:!1,isOpening:!1,height:0,isLandscape:!1,isInitialized:!1,hide:function(){V()&&cordova.plugins.Keyboard.close(),ee&&ee.blur()},show:function(){V()&&cordova.plugins.Keyboard.show()},disable:function(){V()?(window.removeEventListener("native.keyboardshow",pe),window.removeEventListener("native.keyboardhide",x)):document.body.removeEventListener("focusout",x),document.body.removeEventListener("ionic.focusin",ue),document.body.removeEventListener("focusin",ue),window.removeEventListener("orientationchange",D),window.navigator.msPointerEnabled?document.removeEventListener("MSPointerDown",S):document.removeEventListener("touchstart",S),ionic.keyboard.isInitialized=!1},enable:function(){S()}},ae=G(),ionic.Platform.ready(function(){A(),window.addEventListener("orientationchange",D),setTimeout(A,999),window.navigator.msPointerEnabled?document.addEventListener("MSPointerDown",S,!1):document.addEventListener("touchstart",S,!1)});var he,fe={};ionic.viewport={orientation:function(){return window.innerWidth>window.innerHeight?90:0}},ionic.Platform.ready(function(){R(),window.addEventListener("orientationchange",function(){setTimeout(H,1e3)},!1)}),function(e){"use strict";e.views.View=function(){this.initialize.apply(this,arguments)},e.views.View.inherit=e.inherit,e.extend(e.views.View.prototype,{initialize:function(){}})}(window.ionic);var me={effect:{}};!function(e){var t=Date.now||function(){return+new Date},n=60,i=1e3,r={},a=1;me.effect.Animate={requestAnimationFrame:function(){var t=e.requestAnimationFrame||e.webkitRequestAnimationFrame||e.mozRequestAnimationFrame||e.oRequestAnimationFrame,n=!!t;if(t&&!/requestAnimationFrame\(\)\s*\{\s*\[native code\]\s*\}/i.test(t.toString())&&(n=!1),n)return function(e,n){t(e,n)};var i=60,r={},a=0,o=1,s=null,l=+new Date;return function(e){var t=o++;return r[t]=e,a++,null===s&&(s=setInterval(function(){var e=+new Date,t=r;r={},a=0;for(var n in t)t.hasOwnProperty(n)&&(t[n](e),l=e);e-l>2500&&(clearInterval(s),s=null)},1e3/i)),t}}(),stop:function(e){var t=null!=r[e];return t&&(r[e]=null),t},isRunning:function(e){return null!=r[e]},start:function(e,o,s,l,c,d){var u=t(),p=u,h=0,f=0,m=a++;if(d||(d=document.body),m%20===0){var g={};for(var v in r)g[v]=!0;r=g}var _=function(a){var g=a!==!0,v=t();if(!r[m]||o&&!o(m))return r[m]=null,void(s&&s(n-f/((v-u)/i),m,!1));if(g)for(var w=Math.round((v-p)/(i/n))-1,T=0;T<Math.min(w,4);T++)_(!0),f++;l&&(h=(v-u)/l,h>1&&(h=1));var b=c?c(h):h;e(b,v,g)!==!1&&1!==h||!g?g&&(p=v,me.effect.Animate.requestAnimationFrame(_,d)):(r[m]=null,s&&s(n-f/((v-u)/i),m,1===h||null==l))};return r[m]=!0,me.effect.Animate.requestAnimationFrame(_,d),m}}}(window),function(e){var t=function(){},n=function(e){return Math.pow(e-1,3)+1},i=function(e){return(e/=.5)<1?.5*Math.pow(e,3):.5*(Math.pow(e-2,3)+2)};e.views.Scroll=e.views.View.inherit({initialize:function(n){var 
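/* ionic.views.Scroll: the JavaScript-driven scroll view; the defaults below cover per-axis scrolling, bouncing, paging/snapping, zoom limits (0.5..3), per-frame deceleration (0.97) and scrollbar fade, with content size measured by getContentWidth/getContentHeight. */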
i=this;i.__container=n.el,i.__content=n.el.firstElementChild,setTimeout(function(){i.__container&&i.__content&&(i.__container.scrollTop=0,i.__content.scrollTop=0)}),i.options={scrollingX:!1,scrollbarX:!0,scrollingY:!0,scrollbarY:!0,startX:0,startY:0,wheelDampen:6,minScrollbarSizeX:5,minScrollbarSizeY:5,scrollbarsFade:!0,scrollbarFadeDelay:300,scrollbarResizeFadeDelay:1e3,animating:!0,animationDuration:250,decelVelocityThreshold:4,decelVelocityThresholdPaging:4,bouncing:!0,locking:!0,paging:!1,snapping:!1,zooming:!1,minZoom:.5,maxZoom:3,speedMultiplier:1,deceleration:.97,preventDefault:!1,scrollingComplete:t,penetrationDeceleration:.03,penetrationAcceleration:.08,scrollEventInterval:10,freeze:!1,getContentWidth:function(){return Math.max(i.__content.scrollWidth,i.__content.offsetWidth)},getContentHeight:function(){return Math.max(i.__content.scrollHeight,i.__content.offsetHeight+2*i.__content.offsetTop)}};for(var r in n)i.options[r]=n[r];i.hintResize=e.debounce(function(){i.resize()},1e3,!0),i.onScroll=function(){e.scroll.isScrolling?(clearTimeout(i.scrollTimer),i.scrollTimer=setTimeout(i.setScrollStop,80)):setTimeout(i.setScrollStart,50)},i.freeze=function(e){return arguments.length&&(i.options.freeze=e),i.options.freeze},i.freezeShut=i.freeze,i.setScrollStart=function(){e.scroll.isScrolling=Math.abs(e.scroll.lastTop-i.__scrollTop)>1,clearTimeout(i.scrollTimer),i.scrollTimer=setTimeout(i.setScrollStop,80)},i.setScrollStop=function(){e.scroll.isScrolling=!1,e.scroll.lastTop=i.__scrollTop},i.triggerScrollEvent=e.throttle(function(){i.onScroll(),e.trigger("scroll",{scrollTop:i.__scrollTop,scrollLeft:i.__scrollLeft,target:i.__container})},i.options.scrollEventInterval),i.triggerScrollEndEvent=function(){e.trigger("scrollend",{scrollTop:i.__scrollTop,scrollLeft:i.__scrollLeft,target:i.__container})},i.__scrollLeft=i.options.startX,i.__scrollTop=i.options.startY,i.__callback=i.getRenderFn(),i.__initEventHandlers(),i.__createScrollbars()},run:function(){this.resize(),this.__fadeScrollbars("out",this.options.scrollbarResizeFadeDelay)},__isSingleTouch:!1,__isTracking:!1,__didDecelerationComplete:!1,__isGesturing:!1,__isDragging:!1,__isDecelerating:!1,__isAnimating:!1,__clientLeft:0,__clientTop:0,__clientWidth:0,__clientHeight:0,__contentWidth:0,__contentHeight:0,__snapWidth:100,__snapHeight:100,__refreshHeight:null,__refreshActive:!1,__refreshActivate:null,__refreshDeactivate:null,__refreshStart:null,__zoomLevel:1,__scrollLeft:0,__scrollTop:0,__maxScrollLeft:0,__maxScrollTop:0,__scheduledLeft:0,__scheduledTop:0,__scheduledZoom:0,__lastTouchLeft:null,__lastTouchTop:null,__lastTouchMove:null,__positions:null,__minDecelerationScrollLeft:null,__minDecelerationScrollTop:null,__maxDecelerationScrollLeft:null,__maxDecelerationScrollTop:null,__decelerationVelocityX:null,__decelerationVelocityY:null,__transformProperty:null,__perspectiveProperty:null,__indicatorX:null,__indicatorY:null,__scrollbarFadeTimeout:null,__didWaitForSize:null,__sizerTimeout:null,__initEventHandlers:function(){function t(e){return e.touches&&e.touches.length?e.touches:[{pageX:e.pageX,pageY:e.pageY}]}var n,i=this,r=i.__container;if(i.scrollChildIntoView=function(t){var a=r.getBoundingClientRect().bottom;n=r.offsetHeight;var o=i.isShrunkForKeyboard,s=r.parentNode.classList.contains("modal"),l=s&&window.innerWidth>=680;if(!o){if(e.Platform.isIOS()||e.Platform.isFullScreen||l){var 
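/* scrollChildIntoView: when the keyboard opens (iOS, fullscreen, or modals at >=680px width) the container is shrunk by the keyboard's overlap with the viewport, then the focused element is scrolled into the upper half of what remains. */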
c=t.detail.viewportHeight-a,d=Math.max(0,t.detail.keyboardHeight-c);e.requestAnimationFrame(function(){n-=d,r.style.height=n+"px",r.style.overflow="visible",i.resize()})}i.isShrunkForKeyboard=!0}t.detail.isElementUnderKeyboard&&e.requestAnimationFrame(function(){r.scrollTop=0,i.isShrunkForKeyboard&&!o&&(a=r.getBoundingClientRect().bottom);var s=.5*n,l=(t.detail.elementBottom+t.detail.elementTop)/2,c=l-a,d=c+s;d>0&&(e.Platform.isIOS()&&e.tap.cloneFocusedInput(r,i),i.scrollBy(0,d,!0),i.onScroll())}),t.stopPropagation()},i.resetScrollView=function(){i.isShrunkForKeyboard&&(i.isShrunkForKeyboard=!1,r.style.height="",r.style.overflow=""),i.resize()},r.addEventListener("scrollChildIntoView",i.scrollChildIntoView),document.addEventListener("resetScrollView",i.resetScrollView),i.touchStart=function(n){if(i.startCoordinates=e.tap.pointerCoord(n),!e.tap.ignoreScrollStart(n)){if(i.__isDown=!0,e.tap.containsOrIsTextInput(n.target)||"SELECT"===n.target.tagName)return void(i.__hasStarted=!1);i.__isSelectable=!0,i.__enableScrollY=!0,i.__hasStarted=!0,i.doTouchStart(t(n),n.timeStamp),n.preventDefault()}},i.touchMove=function(n){if(!(i.options.freeze||!i.__isDown||!i.__isDown&&n.defaultPrevented||"TEXTAREA"===n.target.tagName&&n.target.parentElement.querySelector(":focus"))){if(!i.__hasStarted&&(e.tap.containsOrIsTextInput(n.target)||"SELECT"===n.target.tagName))return i.__hasStarted=!0,i.doTouchStart(t(n),n.timeStamp),void n.preventDefault();if(i.startCoordinates){var a=e.tap.pointerCoord(n);i.__isSelectable&&e.tap.isTextInput(n.target)&&Math.abs(i.startCoordinates.x-a.x)>20&&(i.__enableScrollY=!1,i.__isSelectable=!0),i.__enableScrollY&&Math.abs(i.startCoordinates.y-a.y)>10&&(i.__isSelectable=!1,e.tap.cloneFocusedInput(r,i))}i.doTouchMove(t(n),n.timeStamp,n.scale),i.__isDown=!0}},i.touchMoveBubble=function(e){i.__isDown&&i.options.preventDefault&&e.preventDefault()},i.touchEnd=function(t){i.__isDown&&(i.doTouchEnd(t,t.timeStamp),i.__isDown=!1,i.__hasStarted=!1,i.__isSelectable=!0,i.__enableScrollY=!0,i.__isDragging||i.__isDecelerating||i.__isAnimating||e.tap.removeClonedInputs(r,i))},i.mouseWheel=e.animationFrameThrottle(function(t){var n=e.DomUtil.getParentOrSelfWithClass(t.target,"ionic-scroll");i.options.freeze||n!==i.__container||(i.hintResize(),i.scrollBy((t.wheelDeltaX||t.deltaX||0)/i.options.wheelDampen,(-t.wheelDeltaY||t.deltaY||0)/i.options.wheelDampen),i.__fadeScrollbars("in"),clearTimeout(i.__wheelHideBarTimeout),i.__wheelHideBarTimeout=setTimeout(function(){i.__fadeScrollbars("out")},100))}),"ontouchstart"in window)r.addEventListener("touchstart",i.touchStart,!1),i.options.preventDefault&&r.addEventListener("touchmove",i.touchMoveBubble,!1),document.addEventListener("touchmove",i.touchMove,!1),document.addEventListener("touchend",i.touchEnd,!1),document.addEventListener("touchcancel",i.touchEnd,!1),document.addEventListener("wheel",i.mouseWheel,!1);else if(window.navigator.pointerEnabled)r.addEventListener("pointerdown",i.touchStart,!1),i.options.preventDefault&&r.addEventListener("pointermove",i.touchMoveBubble,!1),document.addEventListener("pointermove",i.touchMove,!1),document.addEventListener("pointerup",i.touchEnd,!1),document.addEventListener("pointercancel",i.touchEnd,!1),document.addEventListener("wheel",i.mouseWheel,!1);else 
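/* Input wiring cascades: touch events when available, then W3C pointer events, then MSPointer (IE10), and finally the mouse fallback; the shared wheel handler scrolls by delta/wheelDampen and briefly reveals the scrollbars. */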
if(window.navigator.msPointerEnabled)r.addEventListener("MSPointerDown",i.touchStart,!1),i.options.preventDefault&&r.addEventListener("MSPointerMove",i.touchMoveBubble,!1),document.addEventListener("MSPointerMove",i.touchMove,!1),document.addEventListener("MSPointerUp",i.touchEnd,!1),document.addEventListener("MSPointerCancel",i.touchEnd,!1),document.addEventListener("wheel",i.mouseWheel,!1);else{var a=!1;i.mouseDown=function(n){e.tap.ignoreScrollStart(n)||"SELECT"===n.target.tagName||(i.doTouchStart(t(n),n.timeStamp),e.tap.isTextInput(n.target)||n.preventDefault(),a=!0)},i.mouseMove=function(e){i.options.freeze||!a||!a&&e.defaultPrevented||(i.doTouchMove(t(e),e.timeStamp),a=!0)},i.mouseMoveBubble=function(e){a&&i.options.preventDefault&&e.preventDefault()},i.mouseUp=function(e){a&&(i.doTouchEnd(e,e.timeStamp),a=!1)},r.addEventListener("mousedown",i.mouseDown,!1),i.options.preventDefault&&r.addEventListener("mousemove",i.mouseMoveBubble,!1),document.addEventListener("mousemove",i.mouseMove,!1),document.addEventListener("mouseup",i.mouseUp,!1),document.addEventListener("mousewheel",i.mouseWheel,!1),document.addEventListener("wheel",i.mouseWheel,!1)}},__cleanup:function(){var n=this,i=n.__container;i.removeEventListener("touchstart",n.touchStart),i.removeEventListener("touchmove",n.touchMoveBubble),document.removeEventListener("touchmove",n.touchMove),document.removeEventListener("touchend",n.touchEnd),document.removeEventListener("touchcancel",n.touchEnd),i.removeEventListener("pointerdown",n.touchStart),i.removeEventListener("pointermove",n.touchMoveBubble),document.removeEventListener("pointermove",n.touchMove),document.removeEventListener("pointerup",n.touchEnd),document.removeEventListener("pointercancel",n.touchEnd),i.removeEventListener("MSPointerDown",n.touchStart),i.removeEventListener("MSPointerMove",n.touchMoveBubble),document.removeEventListener("MSPointerMove",n.touchMove),document.removeEventListener("MSPointerUp",n.touchEnd),document.removeEventListener("MSPointerCancel",n.touchEnd),i.removeEventListener("mousedown",n.mouseDown),i.removeEventListener("mousemove",n.mouseMoveBubble),document.removeEventListener("mousemove",n.mouseMove),document.removeEventListener("mouseup",n.mouseUp),document.removeEventListener("mousewheel",n.mouseWheel),document.removeEventListener("wheel",n.mouseWheel),i.removeEventListener("scrollChildIntoView",n.scrollChildIntoView),document.removeEventListener("resetScrollView",n.resetScrollView),e.tap.removeClonedInputs(i,n),delete n.__container,delete n.__content,delete n.__indicatorX,delete n.__indicatorY,delete n.options.el,n.__callback=n.scrollChildIntoView=n.resetScrollView=t,n.mouseMove=n.mouseDown=n.mouseUp=n.mouseWheel=n.touchStart=n.touchMove=n.touchEnd=n.touchCancel=t,n.resize=n.scrollTo=n.zoomTo=n.__scrollingComplete=t,i=null},__createScrollbar:function(e){var t=document.createElement("div"),n=document.createElement("div");return n.className="scroll-bar-indicator scroll-bar-fade-out","h"==e?t.className="scroll-bar scroll-bar-h":t.className="scroll-bar scroll-bar-v",t.appendChild(n),t},__createScrollbars:function(){var e,t,n=this;n.options.scrollingX&&(e={el:n.__createScrollbar("h"),sizeRatio:1},e.indicator=e.el.children[0],n.options.scrollbarX&&n.__container.appendChild(e.el),n.__indicatorX=e),n.options.scrollingY&&(t={el:n.__createScrollbar("v"),sizeRatio:1},t.indicator=t.el.children[0],n.options.scrollbarY&&n.__container.appendChild(t.el),n.__indicatorY=t)},__resizeScrollbars:function(){var t=this;if(t.__indicatorX){var 
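/* Scrollbar sizing: each indicator's length is clientSize^2/contentSize (at least 20px, zeroed when the content fits), and sizeRatio maps scroll offsets onto the track; __repositionScrollbars below also squashes the indicator with scaleX/scaleY during overscroll. */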
n=Math.max(Math.round(t.__clientWidth*t.__clientWidth/t.__contentWidth),20);n>t.__contentWidth&&(n=0),n!==t.__indicatorX.size&&e.requestAnimationFrame(function(){t.__indicatorX.indicator.style.width=n+"px"}),t.__indicatorX.size=n,t.__indicatorX.minScale=t.options.minScrollbarSizeX/n,t.__indicatorX.maxPos=t.__clientWidth-n,t.__indicatorX.sizeRatio=t.__maxScrollLeft?t.__indicatorX.maxPos/t.__maxScrollLeft:1}if(t.__indicatorY){var i=Math.max(Math.round(t.__clientHeight*t.__clientHeight/t.__contentHeight),20);i>t.__contentHeight&&(i=0),i!==t.__indicatorY.size&&e.requestAnimationFrame(function(){t.__indicatorY&&(t.__indicatorY.indicator.style.height=i+"px")}),t.__indicatorY.size=i,t.__indicatorY.minScale=t.options.minScrollbarSizeY/i,t.__indicatorY.maxPos=t.__clientHeight-i,t.__indicatorY.sizeRatio=t.__maxScrollTop?t.__indicatorY.maxPos/t.__maxScrollTop:1}},__repositionScrollbars:function(){var e,t,n,i,r,a,o=this,s=0,l=0;if(o.__indicatorX){o.__indicatorY&&(s=10),r=Math.round(o.__indicatorX.sizeRatio*o.__scrollLeft)||0,n=o.__scrollLeft-(o.__maxScrollLeft-s),o.__scrollLeft<0?(t=Math.max(o.__indicatorX.minScale,(o.__indicatorX.size-Math.abs(o.__scrollLeft))/o.__indicatorX.size),r=0,o.__indicatorX.indicator.style[o.__transformOriginProperty]="left center"):n>0?(t=Math.max(o.__indicatorX.minScale,(o.__indicatorX.size-n)/o.__indicatorX.size),r=o.__indicatorX.maxPos-s,o.__indicatorX.indicator.style[o.__transformOriginProperty]="right center"):(r=Math.min(o.__maxScrollLeft,Math.max(0,r)),t=1);var c="translate3d("+r+"px, 0, 0) scaleX("+t+")";o.__indicatorX.transformProp!==c&&(o.__indicatorX.indicator.style[o.__transformProperty]=c,o.__indicatorX.transformProp=c)}if(o.__indicatorY){a=Math.round(o.__indicatorY.sizeRatio*o.__scrollTop)||0,o.__indicatorX&&(l=10),i=o.__scrollTop-(o.__maxScrollTop-l),o.__scrollTop<0?(e=Math.max(o.__indicatorY.minScale,(o.__indicatorY.size-Math.abs(o.__scrollTop))/o.__indicatorY.size),a=0,"center top"!==o.__indicatorY.originProp&&(o.__indicatorY.indicator.style[o.__transformOriginProperty]="center top",o.__indicatorY.originProp="center top")):i>0?(e=Math.max(o.__indicatorY.minScale,(o.__indicatorY.size-i)/o.__indicatorY.size),a=o.__indicatorY.maxPos-l,"center bottom"!==o.__indicatorY.originProp&&(o.__indicatorY.indicator.style[o.__transformOriginProperty]="center bottom",o.__indicatorY.originProp="center bottom")):(a=Math.min(o.__maxScrollTop,Math.max(0,a)),e=1);var d="translate3d(0,"+a+"px, 0) scaleY("+e+")";o.__indicatorY.transformProp!==d&&(o.__indicatorY.indicator.style[o.__transformProperty]=d,o.__indicatorY.transformProp=d)}},__fadeScrollbars:function(e,t){var n=this;if(n.options.scrollbarsFade){var i="scroll-bar-fade-out";n.options.scrollbarsFade===!0&&(clearTimeout(n.__scrollbarFadeTimeout),"in"==e?(n.__indicatorX&&n.__indicatorX.indicator.classList.remove(i),n.__indicatorY&&n.__indicatorY.indicator.classList.remove(i)):n.__scrollbarFadeTimeout=setTimeout(function(){n.__indicatorX&&n.__indicatorX.indicator.classList.add(i),n.__indicatorY&&n.__indicatorY.indicator.classList.add(i)},t||n.options.scrollbarFadeDelay))}},__scrollingComplete:function(){this.options.scrollingComplete(),e.tap.removeClonedInputs(this.__container,this),this.__fadeScrollbars("out")},resize:function(e){var t=this;t.__container&&t.options&&t.setDimensions(t.__container.clientWidth,t.__container.clientHeight,t.options.getContentWidth(),t.options.getContentHeight(),e)},getRenderFn:function(){var e,t=this,n=t.__content,i=document.documentElement.style;"MozAppearance"in i?e="gecko":"WebkitAppearance"in 
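/* getRenderFn sniffs the vendor prefix (gecko/webkit/trident) and returns the fastest renderer available: translate3d when perspective is supported, plain 2D translate otherwise, with a margin+zoom fallback; every render repositions the scrollbars and fires the throttled scroll event. */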
i?e="webkit":"string"==typeof navigator.cpuClass&&(e="trident");var r,a={trident:"ms",gecko:"Moz",webkit:"Webkit",presto:"O"}[e],o=document.createElement("div"),s=a+"Perspective",l=a+"Transform",c=a+"TransformOrigin";return t.__perspectiveProperty=l,t.__transformProperty=l,t.__transformOriginProperty=c,o.style[s]!==r?function(e,i,r,a){var o="translate3d("+-e+"px,"+-i+"px,0) scale("+r+")";o!==t.contentTransform&&(n.style[l]=o,t.contentTransform=o),t.__repositionScrollbars(),a||t.triggerScrollEvent()}:o.style[l]!==r?function(e,i,r,a){n.style[l]="translate("+-e+"px,"+-i+"px) scale("+r+")",t.__repositionScrollbars(),a||t.triggerScrollEvent()}:function(e,i,r,a){n.style.marginLeft=e?-e/r+"px":"",n.style.marginTop=i?-i/r+"px":"",n.style.zoom=r||"",t.__repositionScrollbars(),a||t.triggerScrollEvent()}},setDimensions:function(e,t,n,i,r){var a=this;(e||t||n||i)&&(e===+e&&(a.__clientWidth=e),t===+t&&(a.__clientHeight=t),n===+n&&(a.__contentWidth=n),i===+i&&(a.__contentHeight=i),a.__computeScrollMax(),a.__resizeScrollbars(),r||a.scrollTo(a.__scrollLeft,a.__scrollTop,!0,null,!0))},setPosition:function(e,t){this.__clientLeft=e||0,this.__clientTop=t||0},setSnapSize:function(e,t){this.__snapWidth=e,this.__snapHeight=t},activatePullToRefresh:function(t,n){var i=this;i.__refreshHeight=t,i.__refreshActivate=function(){e.requestAnimationFrame(n.activate)},i.__refreshDeactivate=function(){e.requestAnimationFrame(n.deactivate)},i.__refreshStart=function(){e.requestAnimationFrame(n.start)},i.__refreshShow=function(){e.requestAnimationFrame(n.show)},i.__refreshHide=function(){e.requestAnimationFrame(n.hide)},i.__refreshTail=function(){e.requestAnimationFrame(n.tail)},i.__refreshTailTime=100,i.__minSpinTime=600},triggerPullToRefresh:function(){this.__publish(this.__scrollLeft,-this.__refreshHeight,this.__zoomLevel,!0);var e=new Date;this.refreshStartTime=e.getTime(),this.__refreshStart&&this.__refreshStart()},finishPullToRefresh:function(){var e=this,t=new Date,n=0;e.refreshStartTime+e.__minSpinTime>t.getTime()&&(n=e.refreshStartTime+e.__minSpinTime-t.getTime()),setTimeout(function(){e.__refreshTail&&e.__refreshTail(),setTimeout(function(){e.__refreshActive=!1,e.__refreshDeactivate&&e.__refreshDeactivate(),e.__refreshHide&&e.__refreshHide(),e.scrollTo(e.__scrollLeft,e.__scrollTop,!0)},e.__refreshTailTime)},n)},getValues:function(){return{left:this.__scrollLeft,top:this.__scrollTop,zoom:this.__zoomLevel}},getScrollMax:function(){return{left:this.__maxScrollLeft,top:this.__maxScrollTop}},zoomTo:function(e,t,n,i){var r=this;if(!r.options.zooming)throw new Error("Zooming is not enabled!");r.__isDecelerating&&(me.effect.Animate.stop(r.__isDecelerating),r.__isDecelerating=!1);var a=r.__zoomLevel;null==n&&(n=r.__clientWidth/2),null==i&&(i=r.__clientHeight/2),e=Math.max(Math.min(e,r.options.maxZoom),r.options.minZoom),r.__computeScrollMax(e);var o=(n+r.__scrollLeft)*e/a-n,s=(i+r.__scrollTop)*e/a-i;o>r.__maxScrollLeft?o=r.__maxScrollLeft:0>o&&(o=0),s>r.__maxScrollTop?s=r.__maxScrollTop:0>s&&(s=0),r.__publish(o,s,e,t)},zoomBy:function(e,t,n,i){this.zoomTo(this.__zoomLevel*e,t,n,i)},scrollTo:function(e,t,n,i,r){var a=this;if(a.__isDecelerating&&(me.effect.Animate.stop(a.__isDecelerating),a.__isDecelerating=!1),null!=i&&i!==a.__zoomLevel){if(!a.options.zooming)throw new Error("Zooming is not enabled!");e*=i,t*=i,a.__computeScrollMax(i)}else 
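/* scrollTo rounds the target to page or snap boundaries when those modes are on, clamps it into [0, maxScroll], and either jumps or animates through __publish; scrollBy works from the scheduled position while an animation is in flight. */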
i=a.__zoomLevel;a.options.scrollingX?a.options.paging?e=Math.round(e/a.__clientWidth)*a.__clientWidth:a.options.snapping&&(e=Math.round(e/a.__snapWidth)*a.__snapWidth):e=a.__scrollLeft,a.options.scrollingY?a.options.paging?t=Math.round(t/a.__clientHeight)*a.__clientHeight:a.options.snapping&&(t=Math.round(t/a.__snapHeight)*a.__snapHeight):t=a.__scrollTop,e=Math.max(Math.min(a.__maxScrollLeft,e),0),t=Math.max(Math.min(a.__maxScrollTop,t),0),e===a.__scrollLeft&&t===a.__scrollTop&&(n=!1),a.__publish(e,t,i,n,r)},scrollBy:function(e,t,n){var i=this,r=i.__isAnimating?i.__scheduledLeft:i.__scrollLeft,a=i.__isAnimating?i.__scheduledTop:i.__scrollTop;i.scrollTo(r+(e||0),a+(t||0),n)},doMouseZoom:function(e,t,n,i){var r=e>0?.97:1.03;return this.zoomTo(this.__zoomLevel*r,!1,n-this.__clientLeft,i-this.__clientTop)},doTouchStart:function(e,t){var n=this;n.__decStopped=!(!n.__isDecelerating&&!n.__isAnimating),n.hintResize(),t instanceof Date&&(t=t.valueOf()),"number"!=typeof t&&(t=Date.now()),n.__interruptedAnimation=!0,n.__isDecelerating&&(me.effect.Animate.stop(n.__isDecelerating),n.__isDecelerating=!1,n.__interruptedAnimation=!0),n.__isAnimating&&(me.effect.Animate.stop(n.__isAnimating),n.__isAnimating=!1,n.__interruptedAnimation=!0);var i,r,a=1===e.length;a?(i=e[0].pageX,r=e[0].pageY):(i=Math.abs(e[0].pageX+e[1].pageX)/2,r=Math.abs(e[0].pageY+e[1].pageY)/2),n.__initialTouchLeft=i,n.__initialTouchTop=r,n.__initialTouches=e,n.__zoomLevelStart=n.__zoomLevel,n.__lastTouchLeft=i,n.__lastTouchTop=r,n.__lastTouchMove=t,n.__lastScale=1,n.__enableScrollX=!a&&n.options.scrollingX,n.__enableScrollY=!a&&n.options.scrollingY,n.__isTracking=!0,n.__didDecelerationComplete=!1,n.__isDragging=!a,n.__isSingleTouch=a,n.__positions=[]},doTouchMove:function(e,t,n){t instanceof Date&&(t=t.valueOf()),"number"!=typeof t&&(t=Date.now());var i=this;if(i.__isTracking){var r,a;2===e.length?(r=Math.abs(e[0].pageX+e[1].pageX)/2,a=Math.abs(e[0].pageY+e[1].pageY)/2,!n&&i.options.zooming&&(n=i.__getScale(i.__initialTouches,e))):(r=e[0].pageX,a=e[0].pageY);var o=i.__positions;if(i.__isDragging){i.__decStopped=!1;var s=r-i.__lastTouchLeft,l=a-i.__lastTouchTop,c=i.__scrollLeft,d=i.__scrollTop,u=i.__zoomLevel;if(null!=n&&i.options.zooming){var p=u;if(u=u/i.__lastScale*n,u=Math.max(Math.min(u,i.options.maxZoom),i.options.minZoom),p!==u){var h=r-i.__clientLeft,f=a-i.__clientTop;c=(h+c)*u/p-h,d=(f+d)*u/p-f,i.__computeScrollMax(u)}}if(i.__enableScrollX){c-=s*i.options.speedMultiplier;var m=i.__maxScrollLeft;(c>m||0>c)&&(i.options.bouncing?c+=s/2*i.options.speedMultiplier:c=c>m?m:0)}if(i.__enableScrollY){d-=l*i.options.speedMultiplier;var g=i.__maxScrollTop;d>g||0>d?i.options.bouncing||i.__refreshHeight&&0>d?(d+=l/2*i.options.speedMultiplier,i.__enableScrollX||null==i.__refreshHeight||(0>d?(i.__refreshHidden=!1,i.__refreshShow()):(i.__refreshHide(),i.__refreshHidden=!0),!i.__refreshActive&&d<=-i.__refreshHeight?(i.__refreshActive=!0,i.__refreshActivate&&i.__refreshActivate()):i.__refreshActive&&d>-i.__refreshHeight&&(i.__refreshActive=!1,i.__refreshDeactivate&&i.__refreshDeactivate()))):d=d>g?g:0:i.__refreshHeight&&!i.__refreshHidden&&(i.__refreshHide(),i.__refreshHidden=!0)}o.length>60&&o.splice(0,30),o.push(c,d,t),i.__publish(c,d,u)}else{var 
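/* Drag commit thresholds: with locking on, an axis needs ~3px of travel to enable and ~5px overall to start dragging; committed moves append (left, top, time) samples to __positions (trimmed past 60 entries) for the fling-velocity estimate in doTouchEnd. */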
v=i.options.locking?3:0,_=5,w=Math.abs(r-i.__initialTouchLeft),T=Math.abs(a-i.__initialTouchTop);i.__enableScrollX=i.options.scrollingX&&w>=v,i.__enableScrollY=i.options.scrollingY&&T>=v,o.push(i.__scrollLeft,i.__scrollTop,t),i.__isDragging=(i.__enableScrollX||i.__enableScrollY)&&(w>=_||T>=_),i.__isDragging&&(i.__interruptedAnimation=!1,i.__fadeScrollbars("in"))}i.__lastTouchLeft=r,i.__lastTouchTop=a,i.__lastTouchMove=t,i.__lastScale=n}},doTouchEnd:function(t,n){n instanceof Date&&(n=n.valueOf()),"number"!=typeof n&&(n=Date.now());var i=this;if(i.__isTracking){if(i.__isTracking=!1,i.__isDragging)if(i.__isDragging=!1,i.__isSingleTouch&&i.options.animating&&n-i.__lastTouchMove<=100){for(var r=i.__positions,a=r.length-1,o=a,s=a;s>0&&r[s]>i.__lastTouchMove-100;s-=3)o=s; if(o!==a){var l=r[a]-r[o],c=i.__scrollLeft-r[o-2],d=i.__scrollTop-r[o-1];i.__decelerationVelocityX=c/l*(1e3/60),i.__decelerationVelocityY=d/l*(1e3/60);var u=i.options.paging||i.options.snapping?i.options.decelVelocityThresholdPaging:i.options.decelVelocityThreshold;(Math.abs(i.__decelerationVelocityX)>u||Math.abs(i.__decelerationVelocityY)>u)&&(i.__refreshActive||i.__startDeceleration(n))}else i.__scrollingComplete()}else n-i.__lastTouchMove>100&&i.__scrollingComplete();else i.__decStopped&&(t.isTapHandled=!0,i.__decStopped=!1);if(!i.__isDecelerating)if(i.__refreshActive&&i.__refreshStart){i.__publish(i.__scrollLeft,-i.__refreshHeight,i.__zoomLevel,!0);var p=new Date;i.refreshStartTime=p.getTime(),i.__refreshStart&&i.__refreshStart(),e.Platform.isAndroid()||i.__startDeceleration()}else(i.__interruptedAnimation||i.__isDragging)&&i.__scrollingComplete(),i.scrollTo(i.__scrollLeft,i.__scrollTop,!0,i.__zoomLevel),i.__refreshActive&&(i.__refreshActive=!1,i.__refreshDeactivate&&i.__refreshDeactivate());i.__positions.length=0}},__publish:function(e,t,r,a,o){var s=this,l=s.__isAnimating;if(l&&(me.effect.Animate.stop(l),s.__isAnimating=!1),a&&s.options.animating){s.__scheduledLeft=e,s.__scheduledTop=t,s.__scheduledZoom=r;var c=s.__scrollLeft,d=s.__scrollTop,u=s.__zoomLevel,p=e-c,h=t-d,f=r-u,m=function(e,t,n){n&&(s.__scrollLeft=c+p*e,s.__scrollTop=d+h*e,s.__zoomLevel=u+f*e,s.__callback&&s.__callback(s.__scrollLeft,s.__scrollTop,s.__zoomLevel,o))},g=function(e){return s.__isAnimating===e},v=function(e,t,n){t===s.__isAnimating&&(s.__isAnimating=!1),(s.__didDecelerationComplete||n)&&s.__scrollingComplete(),s.options.zooming&&s.__computeScrollMax()};s.__isAnimating=me.effect.Animate.start(m,g,v,s.options.animationDuration,l?n:i)}else s.__scheduledLeft=s.__scrollLeft=e,s.__scheduledTop=s.__scrollTop=t,s.__scheduledZoom=s.__zoomLevel=r,s.__callback&&s.__callback(e,t,r,o),s.options.zooming&&s.__computeScrollMax()},__computeScrollMax:function(e){var t=this;null==e&&(e=t.__zoomLevel),t.__maxScrollLeft=Math.max(t.__contentWidth*e-t.__clientWidth,0),t.__maxScrollTop=Math.max(t.__contentHeight*e-t.__clientHeight,0),t.__didWaitForSize||t.__maxScrollLeft||t.__maxScrollTop||(t.__didWaitForSize=!0,t.__waitForSize())},__waitForSize:function(){var e=this;clearTimeout(e.__sizerTimeout);var t=function(){e.resize(!0)};t(),e.__sizerTimeout=setTimeout(t,500)},__startDeceleration:function(){var e=this;if(e.options.paging){var t=Math.max(Math.min(e.__scrollLeft,e.__maxScrollLeft),0),n=Math.max(Math.min(e.__scrollTop,e.__maxScrollTop),0),i=e.__clientWidth,r=e.__clientHeight;e.__minDecelerationScrollLeft=Math.floor(t/i)*i,e.__minDecelerationScrollTop=Math.floor(n/r)*r,e.__maxDecelerationScrollLeft=Math.ceil(t/i)*i,e.__maxDecelerationScrollTop=Math.ceil(n/r)*r}else 
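/* Deceleration: fling velocity is sampled from the last ~100ms of touch positions and decays by options.deceleration each frame; paging mode clamps to page boundaries (computed above), the branch below allows the full range, and the penetration constants spring the view back from overscroll. */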
e.__minDecelerationScrollLeft=0,e.__minDecelerationScrollTop=0,e.__maxDecelerationScrollLeft=e.__maxScrollLeft,e.__maxDecelerationScrollTop=e.__maxScrollTop,e.__refreshActive&&(e.__minDecelerationScrollTop=-1*e.__refreshHeight);var a=function(t,n,i){e.__stepThroughDeceleration(i)};e.__minVelocityToKeepDecelerating=e.options.snapping?4:.1;var o=function(){var t=Math.abs(e.__decelerationVelocityX)>=e.__minVelocityToKeepDecelerating||Math.abs(e.__decelerationVelocityY)>=e.__minVelocityToKeepDecelerating;return t||(e.__didDecelerationComplete=!0,e.options.bouncing&&!e.__refreshActive&&e.scrollTo(Math.min(Math.max(e.__scrollLeft,0),e.__maxScrollLeft),Math.min(Math.max(e.__scrollTop,0),e.__maxScrollTop),e.__refreshActive)),t},s=function(){e.__isDecelerating=!1,e.__didDecelerationComplete&&e.__scrollingComplete(),e.options.paging&&e.scrollTo(e.__scrollLeft,e.__scrollTop,e.options.snapping)};e.__isDecelerating=me.effect.Animate.start(a,o,s)},__stepThroughDeceleration:function(e){var t=this,n=t.__scrollLeft+t.__decelerationVelocityX,i=t.__scrollTop+t.__decelerationVelocityY;if(!t.options.bouncing){var r=Math.max(Math.min(t.__maxDecelerationScrollLeft,n),t.__minDecelerationScrollLeft);r!==n&&(n=r,t.__decelerationVelocityX=0);var a=Math.max(Math.min(t.__maxDecelerationScrollTop,i),t.__minDecelerationScrollTop);a!==i&&(i=a,t.__decelerationVelocityY=0)}if(e?t.__publish(n,i,t.__zoomLevel):(t.__scrollLeft=n,t.__scrollTop=i),!t.options.paging){var o=t.options.deceleration;t.__decelerationVelocityX*=o,t.__decelerationVelocityY*=o}if(t.options.bouncing){var s=0,l=0,c=t.options.penetrationDeceleration,d=t.options.penetrationAcceleration;if(n<t.__minDecelerationScrollLeft?s=t.__minDecelerationScrollLeft-n:n>t.__maxDecelerationScrollLeft&&(s=t.__maxDecelerationScrollLeft-n),i<t.__minDecelerationScrollTop?l=t.__minDecelerationScrollTop-i:i>t.__maxDecelerationScrollTop&&(l=t.__maxDecelerationScrollTop-i),0!==s){var u=s*t.__decelerationVelocityX<=t.__minDecelerationScrollLeft;u&&(t.__decelerationVelocityX+=s*c);var p=Math.abs(t.__decelerationVelocityX)<=t.__minVelocityToKeepDecelerating;(!u||p)&&(t.__decelerationVelocityX=s*d)}if(0!==l){var h=l*t.__decelerationVelocityY<=t.__minDecelerationScrollTop;h&&(t.__decelerationVelocityY+=l*c);var f=Math.abs(t.__decelerationVelocityY)<=t.__minVelocityToKeepDecelerating;(!h||f)&&(t.__decelerationVelocityY=l*d)}}},__getDistance:function(e,t){var n=t.pageX-e.pageX,i=t.pageY-e.pageY;return Math.sqrt(n*n+i*i)},__getScale:function(e,t){return e.length>=2&&t.length>=2?this.__getDistance(t[0],t[1])/this.__getDistance(e[0],e[1]):1}}),e.scroll={isScrolling:!1,lastTop:0}}(ionic),function(e){var t=function(){},n=function(e){};e.views.ScrollNative=e.views.View.inherit({initialize:function(t){var n=this;n.__container=n.el=t.el,n.__content=t.el.firstElementChild,n.__frozen=!1,n.isNative=!0,n.__scrollTop=n.el.scrollTop,n.__scrollLeft=n.el.scrollLeft,n.__clientHeight=n.__content.clientHeight,n.__clientWidth=n.__content.clientWidth,n.__maxScrollTop=Math.max(n.__contentHeight-n.__clientHeight,0),n.__maxScrollLeft=Math.max(n.__contentWidth-n.__clientWidth,0),(t.startY>=0||t.startX>=0)&&e.requestAnimationFrame(function(){n.el.scrollTop=t.startY||0,n.el.scrollLeft=t.startX||0,n.__scrollTop=n.el.scrollTop,n.__scrollLeft=n.el.scrollLeft}),n.options={freeze:!1,getContentWidth:function(){return Math.max(n.__content.scrollWidth,n.__content.offsetWidth)},getContentHeight:function(){return Math.max(n.__content.scrollHeight,n.__content.offsetHeight+2*n.__content.offsetTop)}};for(var i in 
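/* ionic.views.ScrollNative wraps native overflow scrolling behind the same interface; zoomTo/zoomBy/activatePullToRefresh are no-ops, scrollTo animates el.scrollTop/scrollLeft over ~250ms with a cubic ease, and freeze()/freezeShut() block touchmove rather than detaching handlers. */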
t)n.options[i]=t[i];n.onScroll=function(){e.scroll.isScrolling||(e.scroll.isScrolling=!0),clearTimeout(n.scrollTimer),n.scrollTimer=setTimeout(function(){e.scroll.isScrolling=!1},80)},n.freeze=function(e){n.__frozen=e},n.freezeShut=function(e){n.__frozenShut=e},n.__initEventHandlers()},__callback:function(){n("__callback")},zoomTo:function(){n("zoomTo")},zoomBy:function(){n("zoomBy")},activatePullToRefresh:function(){n("activatePullToRefresh")},resize:function(e){var t=this;t.__container&&t.options&&t.setDimensions(t.__container.clientWidth,t.__container.clientHeight,t.options.getContentWidth(),t.options.getContentHeight(),e)},run:function(){this.resize()},getValues:function(){var e=this;return e.update(),{left:e.__scrollLeft,top:e.__scrollTop,zoom:1}},update:function(){var e=this;e.__scrollLeft=e.el.scrollLeft,e.__scrollTop=e.el.scrollTop},setDimensions:function(e,t,n,i){var r=this;(e||t||n||i)&&(e===+e&&(r.__clientWidth=e),t===+t&&(r.__clientHeight=t),n===+n&&(r.__contentWidth=n),i===+i&&(r.__contentHeight=i),r.__computeScrollMax())},getScrollMax:function(){return{left:this.__maxScrollLeft,top:this.__maxScrollTop}},scrollBy:function(e,t,n){var i=this;i.update();var r=i.__isAnimating?i.__scheduledLeft:i.__scrollLeft,a=i.__isAnimating?i.__scheduledTop:i.__scrollTop;i.scrollTo(r+(e||0),a+(t||0),n)},scrollTo:function(t,n,i){function r(t,n){function i(e){return--e*e*e+1}function r(){var p=Date.now(),h=Math.min(1,(p-l)/c),f=i(h);d!=t&&(a.el.scrollTop=parseInt(f*(t-d)+d,10)),u!=n&&(a.el.scrollLeft=parseInt(f*(n-u)+u,10)),1>h?e.requestAnimationFrame(r):(e.tap.removeClonedInputs(a.__container,a),a.el.style.overflowX=o,a.el.style.overflowY=s,a.resize())}var l=Date.now(),c=250,d=a.el.scrollTop,u=a.el.scrollLeft;return d===t&&u===n?(a.el.style.overflowX=o,a.el.style.overflowY=s,void a.resize()):void e.requestAnimationFrame(r)}var a=this;if(!i)return a.el.scrollTop=n,a.el.scrollLeft=t,void a.resize();var o=a.el.style.overflowX,s=a.el.style.overflowY;clearTimeout(a.__scrollToCleanupTimeout),a.__scrollToCleanupTimeout=setTimeout(function(){a.el.style.overflowX=o,a.el.style.overflowY=s},500),a.el.style.overflowY="hidden",a.el.style.overflowX="hidden",r(n,t)},__waitForSize:function(){var e=this;clearTimeout(e.__sizerTimeout);var t=function(){e.resize(!0)};t(),e.__sizerTimeout=setTimeout(t,500)},__computeScrollMax:function(){var e=this;e.__maxScrollLeft=Math.max(e.__contentWidth-e.__clientWidth,0),e.__maxScrollTop=Math.max(e.__contentHeight-e.__clientHeight,0),e.__didWaitForSize||e.__maxScrollLeft||e.__maxScrollTop||(e.__didWaitForSize=!0,e.__waitForSize())},__initEventHandlers:function(){var t,n,i=this,r=i.__container;i.scrollChildIntoView=function(a){var o=r.getBoundingClientRect();i.__originalContainerHeight||(i.__originalContainerHeight=o.height),t=i.__originalContainerHeight;var s=i.isShrunkForKeyboard,l=r.parentNode.classList.contains("modal"),c=r.parentNode.classList.contains("popover"),d=l&&window.innerWidth>=680,u=n&&n!==a.detail.keyboardHeight;(!s||u)&&(!c&&(e.Platform.isIOS()||e.Platform.isFullScreen||d)&&e.requestAnimationFrame(function(){t=Math.max(0,Math.min(i.__originalContainerHeight,i.__originalContainerHeight-(a.detail.keyboardHeight-43))),r.style.height=t+"px",r.classList.add("keyboard-up"),i.resize()}),i.isShrunkForKeyboard=!0),n=a.detail.keyboardHeight,a.detail.isElementUnderKeyboard&&e.requestAnimationFrame(function(){var 
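/* Native keyboard handling: the original container height is cached once, shrunk by (keyboardHeight - 43)px with a "keyboard-up" class while the keyboard shows, and the focused element is scrolled to sit ~100px below the container top after a 32ms delay. */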
t=e.DomUtil.getOffsetTop(a.detail.target);setTimeout(function(){e.Platform.isIOS()&&e.tap.cloneFocusedInput(r,i),i.scrollTo(0,t-(o.top+100),!0),i.onScroll()},32)}),a.stopPropagation()},i.resetScrollView=function(){i.isShrunkForKeyboard&&(i.isShrunkForKeyboard=!1,r.style.height="",i.__originalContainerHeight=r.getBoundingClientRect().height,e.Platform.isIOS()&&e.requestAnimationFrame(function(){r.classList.remove("keyboard-up")})),i.resize()},i.handleTouchMove=function(e){return i.__frozenShut?(e.preventDefault(),e.stopPropagation(),!1):i.__frozen?(e.preventDefault(),!1):!0},r.addEventListener("scroll",i.onScroll),r.addEventListener("scrollChildIntoView",i.scrollChildIntoView),r.addEventListener(e.EVENTS.touchstart,i.handleTouchMove),r.addEventListener(e.EVENTS.touchmove,i.handleTouchMove),document.addEventListener("resetScrollView",i.resetScrollView)},__cleanup:function(){var n=this,i=n.__container;i.removeEventListener("scroll",n.onScroll),i.removeEventListener("scrollChildIntoView",n.scrollChildIntoView),i.removeEventListener(e.EVENTS.touchstart,n.handleTouchMove),i.removeEventListener(e.EVENTS.touchmove,n.handleTouchMove),document.removeEventListener("resetScrollView",n.resetScrollView),e.tap.removeClonedInputs(i,n),delete n.__container,delete n.__content,delete n.__indicatorX,delete n.__indicatorY,delete n.options.el,n.resize=n.scrollTo=n.onScroll=n.resetScrollView=t,n.scrollChildIntoView=t,i=null}})}(ionic),function(e){"use strict";var t="item",n="item-content",i="item-sliding",r="item-options",a="item-placeholder",o="item-reordering",s="item-reorder",l=function(){};l.prototype={start:function(){},drag:function(){},end:function(){},isSameItem:function(){return!1}};var c=function(e){this.dragThresholdX=e.dragThresholdX||10,this.el=e.el,this.item=e.item,this.canSwipe=e.canSwipe};c.prototype=new l,c.prototype.start=function(a){var o,s,l,c;this.canSwipe()&&(o=a.target.classList.contains(n)?a.target:a.target.classList.contains(t)?a.target.querySelector("."+n):e.DomUtil.getParentWithClass(a.target,n),o&&(o.classList.remove(i),l=parseFloat(o.style[e.CSS.TRANSFORM].replace("translate3d(","").split(",")[0])||0,s=o.parentNode.querySelector("."+r),s&&(s.classList.remove("invisible"),c=s.offsetWidth,this._currentDrag={buttons:s,buttonsWidth:c,content:o,startOffsetX:l})))},c.prototype.isSameItem=function(e){return e._lastDrag&&this._currentDrag?this._currentDrag.content==e._lastDrag.content:!1},c.prototype.clean=function(t){function n(){i.buttons&&i.buttons.classList.add("invisible")}var i=this._lastDrag;i&&i.content&&(i.content.style[e.CSS.TRANSITION]="",i.content.style[e.CSS.TRANSFORM]="",t?(i.content.style[e.CSS.TRANSITION]="none",n(),e.requestAnimationFrame(function(){i.content.style[e.CSS.TRANSITION]=""})):e.requestAnimationFrame(function(){setTimeout(n,250)}))},c.prototype.drag=e.animationFrameThrottle(function(t){var n;if(this._currentDrag&&(!this._isDragging&&(Math.abs(t.gesture.deltaX)>this.dragThresholdX||Math.abs(this._currentDrag.startOffsetX)>0)&&(this._isDragging=!0),this._isDragging)){n=this._currentDrag.buttonsWidth;var i=Math.min(0,this._currentDrag.startOffsetX+t.gesture.deltaX);-n>i&&(i=Math.min(-n,-n+.4*(t.gesture.deltaX+n))),this._currentDrag.content.$$ionicOptionsOpen=0!==i,this._currentDrag.content.style[e.CSS.TRANSFORM]="translate3d("+i+"px, 0, 0)",this._currentDrag.content.style[e.CSS.TRANSITION]="none"}}),c.prototype.end=function(t,n){var i=this;if(!i._currentDrag)return void(n&&n());var 
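/* List item drag ops: SlideDrag translates .item-content to reveal the .item-options buttons, snapping open past half the buttons' width (with 0.4x resistance beyond), while ReorderDrag below floats the item over a cloned placeholder and auto-scrolls near the container edges. */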
r=-i._currentDrag.buttonsWidth;t.gesture.deltaX>-(i._currentDrag.buttonsWidth/2)&&("left"==t.gesture.direction&&Math.abs(t.gesture.velocityX)<.3?r=0:"right"==t.gesture.direction&&(r=0)),e.requestAnimationFrame(function(){if(0===r){i._currentDrag.content.style[e.CSS.TRANSFORM]="";var t=i._currentDrag.buttons;setTimeout(function(){t&&t.classList.add("invisible")},250)}else i._currentDrag.content.style[e.CSS.TRANSFORM]="translate3d("+r+"px,0,0)";i._currentDrag.content.style[e.CSS.TRANSITION]="",i._lastDrag||(i._lastDrag={}),e.extend(i._lastDrag,i._currentDrag),i._currentDrag&&(i._currentDrag.buttons=null,i._currentDrag.content=null),i._currentDrag=null,n&&n()})};var d=function(e){var t=this;if(t.dragThresholdY=e.dragThresholdY||0,t.onReorder=e.onReorder,t.listEl=e.listEl,t.el=t.item=e.el,t.scrollEl=e.scrollEl,t.scrollView=e.scrollView,t.listElTrueTop=0,t.listEl.offsetParent){var n=t.listEl;do t.listElTrueTop+=n.offsetTop,n=n.offsetParent;while(n)}};d.prototype=new l,d.prototype._moveElement=function(t){var n=t.gesture.center.pageY+this.scrollView.getValues().top-this._currentDrag.elementHeight/2-this.listElTrueTop;this.el.style[e.CSS.TRANSFORM]="translate3d(0, "+n+"px, 0)"},d.prototype.deregister=function(){this.listEl=this.el=this.scrollEl=this.scrollView=null},d.prototype.start=function(t){var n=e.DomUtil.getChildIndex(this.el,this.el.nodeName.toLowerCase()),i=this.el.scrollHeight,r=this.el.cloneNode(!0);r.classList.add(a),this.el.parentNode.insertBefore(r,this.el),this.el.classList.add(o),this._currentDrag={elementHeight:i,startIndex:n,placeholder:r,scrollHeight:scroll,list:r.parentNode},this._moveElement(t)},d.prototype.drag=e.animationFrameThrottle(function(t){var n=this;if(this._currentDrag){var i=0,r=t.gesture.center.pageY,a=this.listElTrueTop;if(this.scrollView){var o=this.scrollView.__container;i=this.scrollView.getValues().top;var s=o.offsetTop,l=s-r+this._currentDrag.elementHeight/2,c=r+this._currentDrag.elementHeight/2-s-o.offsetHeight;t.gesture.deltaY<0&&l>0&&i>0&&(this.scrollView.scrollBy(null,-l),e.requestAnimationFrame(function(){n.drag(t)})),t.gesture.deltaY>0&&c>0&&i<this.scrollView.getScrollMax().top&&(this.scrollView.scrollBy(null,c),e.requestAnimationFrame(function(){n.drag(t)}))}!this._isDragging&&Math.abs(t.gesture.deltaY)>this.dragThresholdY&&(this._isDragging=!0),this._isDragging&&(this._moveElement(t),this._currentDrag.currentY=i+r-a)}}),d.prototype._getReorderIndex=function(){for(var e,t=this,n=Array.prototype.slice.call(t._currentDrag.placeholder.parentNode.children).filter(function(e){return e.nodeName===t.el.nodeName&&e!==t.el}),i=t._currentDrag.currentY,r=0,a=n.length;a>r;r++)if(e=n[r],r===a-1){if(i>e.offsetTop)return r}else if(0===r){if(i<e.offsetTop+e.offsetHeight)return r}else if(i>e.offsetTop-e.offsetHeight/2&&i<e.offsetTop+e.offsetHeight)return r;return t._currentDrag.startIndex},d.prototype.end=function(t,n){if(!this._currentDrag)return void(n&&n());var i=this._currentDrag.placeholder,r=this._getReorderIndex();this.el.classList.remove(o),this.el.style[e.CSS.TRANSFORM]="",i.parentNode.insertBefore(this.el,i),i.parentNode.removeChild(i),this.onReorder&&this.onReorder(this.el,this._currentDrag.startIndex,r),this._currentDrag={placeholder:null,content:null},this._currentDrag=null,n&&n()},e.views.ListView=e.views.View.inherit({initialize:function(t){var 
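/* ionic.views.ListView listens for drag/release gestures, sending vertical drags that start on an .item-reorder handle to ReorderDrag and horizontal drags over ~5px on items with .item-options to SlideDrag, freezing the scroll view meanwhile; virtual-scroll thresholds prune off-screen rows. */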
n=this;t=e.extend({onReorder:function(){},virtualRemoveThreshold:-200,virtualAddThreshold:200,canSwipe:function(){return!0}},t),e.extend(n,t),!n.itemHeight&&n.listEl&&(n.itemHeight=n.listEl.children[0]&&parseInt(n.listEl.children[0].style.height,10)),n.onRefresh=t.onRefresh||function(){},n.onRefreshOpening=t.onRefreshOpening||function(){},n.onRefreshHolding=t.onRefreshHolding||function(){};var i={};e.DomUtil.getParentOrSelfWithClass(n.el,"overflow-scroll")&&(i.prevent_default_directions=["left","right"]),window.ionic.onGesture("release",function(e){n._handleEndDrag(e)},n.el,i),window.ionic.onGesture("drag",function(e){n._handleDrag(e)},n.el,i),n._initDrag()},deregister:function(){this.el=this.listEl=this.scrollEl=this.scrollView=null,this.isScrollFreeze&&self.scrollView.freeze(!1)},stopRefreshing:function(){var e=this.el.querySelector(".list-refresher");e.style.height="0"},didScroll:function(e){var t=this;if(t.isVirtual){var n=t.itemHeight,i=e.target.scrollHeight,r=t.el.parentNode.offsetHeight,a=Math.max(0,e.scrollTop+t.virtualRemoveThreshold),o=Math.min(i,Math.abs(e.scrollTop)+r+t.virtualAddThreshold),s=parseInt(Math.abs(a/n),10),l=parseInt(Math.abs(o/n),10);t._virtualItemsToRemove=Array.prototype.slice.call(t.listEl.children,0,s),t.renderViewport&&t.renderViewport(a,o,s,l)}},didStopScrolling:function(){if(this.isVirtual)for(var e=0;e<this._virtualItemsToRemove.length;e++)this.didHideItem&&this.didHideItem(e)},clearDragEffects:function(e){this._lastDragOp&&(this._lastDragOp.clean&&this._lastDragOp.clean(e),this._lastDragOp.deregister&&this._lastDragOp.deregister(),this._lastDragOp=null)},_initDrag:function(){this._lastDragOp&&this._lastDragOp.deregister&&this._lastDragOp.deregister(),this._lastDragOp=this._dragOp,this._dragOp=null},_getItem:function(e){for(;e;){if(e.classList&&e.classList.contains(t))return e;e=e.parentNode}return null},_startDrag:function(t){var n=this;n._isDragging=!1;var i,r=n._lastDragOp;n._didDragUpOrDown&&r instanceof c&&r.clean&&r.clean(),!e.DomUtil.getParentOrSelfWithClass(t.target,s)||"up"!=t.gesture.direction&&"down"!=t.gesture.direction?!n._didDragUpOrDown&&("left"==t.gesture.direction||"right"==t.gesture.direction)&&Math.abs(t.gesture.deltaX)>5&&(i=n._getItem(t.target),i&&i.querySelector(".item-options")&&(n._dragOp=new c({el:n.el,item:i,canSwipe:n.canSwipe}),n._dragOp.start(t),t.preventDefault(),n.isScrollFreeze=n.scrollView.freeze(!0))):(i=n._getItem(t.target),i&&(n._dragOp=new d({listEl:n.el,el:i,scrollEl:n.scrollEl,scrollView:n.scrollView,onReorder:function(e,t,i){n.onReorder&&n.onReorder(e,t,i)}}),n._dragOp.start(t),t.preventDefault())),r&&n._dragOp&&!n._dragOp.isSameItem(r)&&t.defaultPrevented&&r.clean&&r.clean()},_handleEndDrag:function(e){var t=this;t.scrollView&&(t.isScrollFreeze=t.scrollView.freeze(!1)),t._didDragUpOrDown=!1,t._dragOp&&t._dragOp.end(e,function(){t._initDrag()})},_handleDrag:function(e){var t=this;Math.abs(e.gesture.deltaY)>5&&(t._didDragUpOrDown=!0),t.isDragging||t._dragOp||t._startDrag(e),t._dragOp&&(e.gesture.srcEvent.preventDefault(),t._dragOp.drag(e))}})}(ionic),function(e){"use strict";e.views.Modal=e.views.View.inherit({initialize:function(t){t=e.extend({focusFirstInput:!1,unfocusOnHide:!0,focusFirstDelay:600,backdropClickToClose:!0,hardwareBackButtonClose:!0},t),e.extend(this,t),this.el=t.el},show:function(){var e=this;e.focusFirstInput&&window.setTimeout(function(){var t=e.el.querySelector("input, textarea");t&&t.focus&&t.focus()},e.focusFirstDelay)},hide:function(){if(this.unfocusOnHide){var 
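/* ionic.views.Modal optionally focuses the first input on show and, in the hide path below, blurs inputs so the keyboard closes; ionic.views.SideMenu / SideMenuContent (width, z-index stacking, translate3d dragging) and the Slider carousel follow. */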
e=this.el.querySelectorAll("input, textarea");window.setTimeout(function(){for(var t=0;t<e.length;t++)e[t].blur&&e[t].blur()})}}})}(ionic),function(e){"use strict";e.views.SideMenu=e.views.View.inherit({initialize:function(e){this.el=e.el,this.isEnabled="undefined"==typeof e.isEnabled?!0:e.isEnabled,this.setWidth(e.width)},getFullWidth:function(){return this.width},setWidth:function(e){this.width=e,this.el.style.width=e+"px"},setIsEnabled:function(e){this.isEnabled=e},bringUp:function(){"0"!==this.el.style.zIndex&&(this.el.style.zIndex="0")},pushDown:function(){"-1"!==this.el.style.zIndex&&(this.el.style.zIndex="-1")}}),e.views.SideMenuContent=e.views.View.inherit({initialize:function(t){e.extend(this,{animationClass:"menu-animated",onDrag:function(){},onEndDrag:function(){}},t),e.onGesture("drag",e.proxy(this._onDrag,this),this.el),e.onGesture("release",e.proxy(this._onEndDrag,this),this.el)},_onDrag:function(e){this.onDrag&&this.onDrag(e)},_onEndDrag:function(e){this.onEndDrag&&this.onEndDrag(e)},disableAnimation:function(){this.el.classList.remove(this.animationClass)},enableAnimation:function(){this.el.classList.add(this.animationClass)},getTranslateX:function(){return parseFloat(this.el.style[e.CSS.TRANSFORM].replace("translate3d(","").split(",")[0])},setTranslateX:e.animationFrameThrottle(function(t){this.el.style[e.CSS.TRANSFORM]="translate3d("+t+"px, 0, 0)"})})}(ionic),function(e){"use strict";e.views.Slider=e.views.View.inherit({initialize:function(e){function t(){if(b.offsetWidth){S=D.children,x=S.length,S.length<2&&(e.continuous=!1),T.transitions&&e.continuous&&S.length<3&&(D.appendChild(S[0].cloneNode(!0)),D.appendChild(D.children[1].cloneNode(!0)),S=D.children),y=new Array(S.length),E=b.offsetWidth||b.getBoundingClientRect().width,D.style.width=S.length*E+"px";for(var t=S.length;t--;){var n=S[t];n.style.width=E+"px",n.setAttribute("data-index",t),T.transitions&&(n.style.left=t*-E+"px",o(t,M>t?-E:t>M?E:0,0))}e.continuous&&T.transitions&&(o(r(M-1),-E,0),o(r(M+1),E,0)),T.transitions||(D.style.left=M*-E+"px"),b.style.visibility="visible",e.slidesChanged&&e.slidesChanged()}}function n(t){e.continuous?a(M-1,t):M&&a(M-1,t)}function i(t){e.continuous?a(M+1,t):M<S.length-1&&a(M+1,t)}function r(e){return(S.length+e%S.length)%S.length}function a(t,n){if(M!=t){if(!S)return void(M=t);if(T.transitions){var i=Math.abs(M-t)/(M-t);if(e.continuous){var a=i;i=-y[r(t)]/E,i!==a&&(t=-i*S.length+t)}for(var s=Math.abs(M-t)-1;s--;)o(r((t>M?t:M)-s-1),E*i,0);t=r(t),o(M,E*i,n||L),o(t,0,n||L),e.continuous&&o(r(t-i),-(E*i),0)}else t=r(t),l(M*-E,t*-E,n||L);M=t,w(e.callback&&e.callback(M,S[M]))}}function o(e,t,n){s(e,t,n),y[e]=t}function s(e,t,n){var i=S[e],r=i&&i.style;r&&(r.webkitTransitionDuration=r.MozTransitionDuration=r.msTransitionDuration=r.OTransitionDuration=r.transitionDuration=n+"ms",r.webkitTransform="translate("+t+"px,0)translateZ(0)",r.msTransform=r.MozTransform=r.OTransform="translateX("+t+"px)")}function l(t,n,i){if(!i)return void(D.style.left=n+"px");var r=+new Date,a=setInterval(function(){var o=+new Date-r;return o>i?(D.style.left=n+"px",I&&c(),e.transitionEnd&&e.transitionEnd.call(event,M,S[M]),void clearInterval(a)):void(D.style.left=(n-t)*(Math.floor(o/i*100)/100)+t+"px")},4)}function c(){C=setTimeout(i,I)}function d(){I=e.auto||0,clearTimeout(C)}var u,p,h,f=this;window.navigator.pointerEnabled?(u="pointerdown",p="pointermove",h="pointerup"):window.navigator.msPointerEnabled?(u="MSPointerDown",p="MSPointerMove",h="MSPointerUp"):(u="touchstart",p="touchmove",h="touchend");var 
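/* ionic.views.Slider lays slides out at index*width, loops by cloning edge slides when fewer than three exist, and animates with per-slide CSS transitions, falling back to a setInterval-driven "left" animation where transitions are unsupported. */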
m="mousedown",g="mousemove",v="mouseup",_=function(){},w=function(e){setTimeout(e||_,0)},T={addEventListener:!!window.addEventListener,transitions:function(e){var t=["transitionProperty","WebkitTransition","MozTransition","OTransition","msTransition"];for(var n in t)if(void 0!==e.style[t[n]])return!0;return!1}(document.createElement("swipe"))},b=e.el;if(b){var S,y,E,x,D=b.children[0];e=e||{};var M=parseInt(e.startSlide,10)||0,L=e.speed||300;e.continuous=void 0!==e.continuous?e.continuous:!0;var C,P,I=e.auto||0,k={},N={},z={handleEvent:function(n){switch(!n.touches&&n.pageX&&n.pageY&&(n.touches=[{pageX:n.pageX,pageY:n.pageY}]),n.type){case u:this.start(n);break;case m:this.start(n);break;case p:this.touchmove(n);break;case g:this.touchmove(n);break;case h:w(this.end(n));break;case v:w(this.end(n));break;case"webkitTransitionEnd":case"msTransitionEnd":case"oTransitionEnd":case"otransitionend":case"transitionend":w(this.transitionEnd(n));break;case"resize":w(t)}e.stopPropagation&&n.stopPropagation()},start:function(e){if(e.touches){var t=e.touches[0];k={x:t.pageX,y:t.pageY,time:+new Date},P=void 0,N={},D.addEventListener(p,this,!1),D.addEventListener(g,this,!1),D.addEventListener(h,this,!1),D.addEventListener(v,this,!1),document.addEventListener(h,this,!1),document.addEventListener(v,this,!1)}},touchmove:function(t){if(!(!t.touches||t.touches.length>1||t.scale&&1!==t.scale||f.slideIsDisabled)){e.disableScroll&&t.preventDefault();var n=t.touches[0];N={x:n.pageX-k.x,y:n.pageY-k.y},"undefined"==typeof P&&(P=!!(P||Math.abs(N.x)<Math.abs(N.y))),P||(t.preventDefault(),d(),e.continuous?(s(r(M-1),N.x+y[r(M-1)],0),s(M,N.x+y[M],0),s(r(M+1),N.x+y[r(M+1)],0)):(e.bouncing?N.x=N.x/(!M&&N.x>0||M==S.length-1&&N.x<0?Math.abs(N.x)/E+1:1):(E*M-N.x<0&&(N.x=Math.min(N.x,E*M)),Math.abs(N.x)>E*(S.length-M-1)&&(N.x=Math.max(-E*(S.length-M-1),N.x))),s(M-1,N.x+y[M-1],0),s(M,N.x+y[M],0),s(M+1,N.x+y[M+1],0)),e.onDrag&&e.onDrag())}},end:function(){var t=+new Date-k.time,n=Number(t)<250&&Math.abs(N.x)>20||Math.abs(N.x)>E/2,i=!M&&N.x>0||M==S.length-1&&N.x<0;e.continuous&&(i=!1);var a=N.x<0;P||(n&&!i?(a?(e.continuous?(o(r(M-1),-E,0),o(r(M+2),E,0)):o(M-1,-E,0),o(M,y[M]-E,L),o(r(M+1),y[r(M+1)]-E,L),M=r(M+1)):(e.continuous?(o(r(M+1),E,0),o(r(M-2),-E,0)):o(M+1,E,0),o(M,y[M]+E,L),o(r(M-1),y[r(M-1)]+E,L),M=r(M-1)),e.callback&&e.callback(M,S[M])):e.continuous?(o(r(M-1),-E,L),o(M,0,L),o(r(M+1),E,L)):(o(M-1,-E,L),o(M,0,L),o(M+1,E,L))),D.removeEventListener(p,z,!1),D.removeEventListener(g,z,!1),D.removeEventListener(h,z,!1),D.removeEventListener(v,z,!1),document.removeEventListener(h,z,!1),document.removeEventListener(v,z,!1),e.onDragEnd&&e.onDragEnd()},transitionEnd:function(t){parseInt(t.target.getAttribute("data-index"),10)==M&&(I&&c(),e.transitionEnd&&e.transitionEnd.call(t,M,S[M]))}};this.update=function(){setTimeout(t)},this.setup=function(){t()},this.loop=function(t){return arguments.length&&(e.continuous=!!t),e.continuous},this.enableSlide=function(e){return arguments.length&&(this.slideIsDisabled=!e),!this.slideIsDisabled},this.slide=this.select=function(e,t){d(),a(e,t)},this.prev=this.previous=function(){d(),n()},this.next=function(){d(),i()},this.stop=function(){d()},this.start=function(){c()},this.autoPlay=function(e){!I||0>I?d():(I=e,c())},this.currentIndex=this.selected=function(){return M},this.slidesCount=this.count=function(){return 
x},this.kill=function(){d(),D.style.width="",D.style.left="",S&&(S=[]),T.addEventListener?(D.removeEventListener(u,z,!1),D.removeEventListener(m,z,!1),D.removeEventListener("webkitTransitionEnd",z,!1),D.removeEventListener("msTransitionEnd",z,!1),D.removeEventListener("oTransitionEnd",z,!1),D.removeEventListener("otransitionend",z,!1),D.removeEventListener("transitionend",z,!1),window.removeEventListener("resize",z,!1)):window.onresize=null},this.load=function(){t(),I&&c(),T.addEventListener?(D.addEventListener(u,z,!1),D.addEventListener(m,z,!1),T.transitions&&(D.addEventListener("webkitTransitionEnd",z,!1),D.addEventListener("msTransitionEnd",z,!1),D.addEventListener("oTransitionEnd",z,!1),D.addEventListener("otransitionend",z,!1),D.addEventListener("transitionend",z,!1)),window.addEventListener("resize",z,!1)):window.onresize=function(){t()}}}}})}(ionic),function(){"use strict";function e(e){e.fn.swiper=function(t){var i;return e(this).each(function(){var e=new n(this,t);i||(i=e)}),i}}var t,n=function(e,r,a,o){function s(){return"horizontal"===y.params.direction}function l(e){return Math.floor(e)}function c(){y.autoplayTimeoutId=setTimeout(function(){y.params.loop?(y.fixLoop(),y._slideNext()):y.isEnd?r.autoplayStopOnLast?y.stopAutoplay():y._slideTo(0):y._slideNext()},y.params.autoplay)}function d(e,n){var i=t(e.target);if(!i.is(n))if("string"==typeof n)i=i.parents(n);else if(n.nodeType){var r;return i.parents().each(function(e,t){t===n&&(r=n)}),r?n:void 0}return 0===i.length?void 0:i[0]}function u(e,t){t=t||{};var n=window.MutationObserver||window.WebkitMutationObserver,i=new n(function(e){e.forEach(function(e){y.onResize(!0),y.emit("onObserverUpdate",y,e)})});i.observe(e,{attributes:"undefined"==typeof t.attributes?!0:t.attributes,childList:"undefined"==typeof t.childList?!0:t.childList,characterData:"undefined"==typeof t.characterData?!0:t.characterData}),y.observers.push(i)}function p(e){e.originalEvent&&(e=e.originalEvent);var t=e.keyCode||e.charCode;if(!y.params.allowSwipeToNext&&(s()&&39===t||!s()&&40===t))return!1;if(!y.params.allowSwipeToPrev&&(s()&&37===t||!s()&&38===t))return!1;if(!(e.shiftKey||e.altKey||e.ctrlKey||e.metaKey||document.activeElement&&document.activeElement.nodeName&&("input"===document.activeElement.nodeName.toLowerCase()||"textarea"===document.activeElement.nodeName.toLowerCase()))){if(37===t||39===t||38===t||40===t){var n=!1;if(y.container.parents(".swiper-slide").length>0&&0===y.container.parents(".swiper-slide-active").length)return;var i={left:window.pageXOffset,top:window.pageYOffset},r=window.innerWidth,a=window.innerHeight,o=y.container.offset();y.rtl&&(o.left=o.left-y.container[0].scrollLeft);for(var l=[[o.left,o.top],[o.left+y.width,o.top],[o.left,o.top+y.height],[o.left+y.width,o.top+y.height]],c=0;c<l.length;c++){var d=l[c];d[0]>=i.left&&d[0]<=i.left+r&&d[1]>=i.top&&d[1]<=i.top+a&&(n=!0)}if(!n)return}s()?((37===t||39===t)&&(e.preventDefault?e.preventDefault():e.returnValue=!1),(39===t&&!y.rtl||37===t&&y.rtl)&&y.slideNext(),(37===t&&!y.rtl||39===t&&y.rtl)&&y.slidePrev()):((38===t||40===t)&&(e.preventDefault?e.preventDefault():e.returnValue=!1),40===t&&y.slideNext(),38===t&&y.slidePrev())}}function h(e){e.originalEvent&&(e=e.originalEvent);var t=y.mousewheel.event,n=0,i=y.rtl?-1:1;if(e.detail)n=-e.detail;else if("mousewheel"===t)if(y.params.mousewheelForceToAxis)if(s()){if(!(Math.abs(e.wheelDeltaX)>Math.abs(e.wheelDeltaY)))return;n=e.wheelDeltaX*i}else{if(!(Math.abs(e.wheelDeltaY)>Math.abs(e.wheelDeltaX)))return;n=e.wheelDeltaY}else 
n=Math.abs(e.wheelDeltaX)>Math.abs(e.wheelDeltaY)?-e.wheelDeltaX*i:-e.wheelDeltaY;else if("DOMMouseScroll"===t)n=-e.detail;else if("wheel"===t)if(y.params.mousewheelForceToAxis)if(s()){if(!(Math.abs(e.deltaX)>Math.abs(e.deltaY)))return;n=-e.deltaX*i}else{if(!(Math.abs(e.deltaY)>Math.abs(e.deltaX)))return;n=-e.deltaY}else n=Math.abs(e.deltaX)>Math.abs(e.deltaY)?-e.deltaX*i:-e.deltaY;if(0!==n){if(y.params.mousewheelInvert&&(n=-n),y.params.freeMode){var r=y.getWrapperTranslate()+n*y.params.mousewheelSensitivity,a=y.isBeginning,o=y.isEnd;if(r>=y.minTranslate()&&(r=y.minTranslate()),r<=y.maxTranslate()&&(r=y.maxTranslate()),y.setWrapperTransition(0),y.setWrapperTranslate(r),y.updateProgress(),y.updateActiveIndex(),(!a&&y.isBeginning||!o&&y.isEnd)&&y.updateClasses(),y.params.freeModeSticky&&(clearTimeout(y.mousewheel.timeout),y.mousewheel.timeout=setTimeout(function(){y.slideReset()},300)),0===r||r===y.maxTranslate())return}else{if((new window.Date).getTime()-y.mousewheel.lastScrollTime>60)if(0>n)if(y.isEnd&&!y.params.loop||y.animating){if(y.params.mousewheelReleaseOnEdges)return!0}else y.slideNext();else if(y.isBeginning&&!y.params.loop||y.animating){if(y.params.mousewheelReleaseOnEdges)return!0}else y.slidePrev();y.mousewheel.lastScrollTime=(new window.Date).getTime()}return y.params.autoplay&&y.stopAutoplay(),e.preventDefault?e.preventDefault():e.returnValue=!1,!1}}function f(e,n){e=t(e);var i,r,a,o=y.rtl?-1:1;i=e.attr("data-swiper-parallax")||"0",r=e.attr("data-swiper-parallax-x"),a=e.attr("data-swiper-parallax-y"), r||a?(r=r||"0",a=a||"0"):s()?(r=i,a="0"):(a=i,r="0"),r=r.indexOf("%")>=0?parseInt(r,10)*n*o+"%":r*n*o+"px",a=a.indexOf("%")>=0?parseInt(a,10)*n+"%":a*n+"px",e.transform("translate3d("+r+", "+a+",0px)")}function m(e){return 0!==e.indexOf("on")&&(e=e[0]!==e[0].toUpperCase()?"on"+e[0].toUpperCase()+e.substring(1):"on"+e),e}if(!(this instanceof n))return new n(e,r);var g={direction:"horizontal",touchEventsTarget:"container",initialSlide:0,speed:300,autoplay:!1,autoplayDisableOnInteraction:!0,iOSEdgeSwipeDetection:!1,iOSEdgeSwipeThreshold:20,freeMode:!1,freeModeMomentum:!0,freeModeMomentumRatio:1,freeModeMomentumBounce:!0,freeModeMomentumBounceRatio:1,freeModeSticky:!1,freeModeMinimumVelocity:.02,autoHeight:!1,setWrapperSize:!1,virtualTranslate:!1,effect:"slide",coverflow:{rotate:50,stretch:0,depth:100,modifier:1,slideShadows:!0},cube:{slideShadows:!0,shadow:!0,shadowOffset:20,shadowScale:.94},fade:{crossFade:!1},parallax:!1,scrollbar:null,scrollbarHide:!0,scrollbarDraggable:!1,scrollbarSnapOnRelease:!1,keyboardControl:!1,mousewheelControl:!1,mousewheelReleaseOnEdges:!1,mousewheelInvert:!1,mousewheelForceToAxis:!1,mousewheelSensitivity:1,hashnav:!1,breakpoints:void 0,spaceBetween:0,slidesPerView:1,slidesPerColumn:1,slidesPerColumnFill:"column",slidesPerGroup:1,centeredSlides:!1,slidesOffsetBefore:0,slidesOffsetAfter:0,roundLengths:!1,touchRatio:1,touchAngle:45,simulateTouch:!0,shortSwipes:!0,longSwipes:!0,longSwipesRatio:.5,longSwipesMs:300,followFinger:!0,onlyExternal:!1,threshold:0,touchMoveStopPropagation:!0,pagination:null,paginationElement:"span",paginationClickable:!1,paginationHide:!1,paginationBulletRender:null,resistance:!0,resistanceRatio:.85,nextButton:null,prevButton:null,watchSlidesProgress:!1,watchSlidesVisibility:!1,grabCursor:!1,preventClicks:!0,preventClicksPropagation:!0,slideToClickedSlide:!1,lazyLoading:!1,lazyLoadingInPrevNext:!1,lazyLoadingOnTransitionStart:!1,preloadImages:!0,updateOnImagesReady:!0,loop:!1,loopAdditionalSlides:0,loopedSlides:null,control:void 
0,controlInverse:!1,controlBy:"slide",allowSwipeToPrev:!0,allowSwipeToNext:!0,swipeHandler:null,noSwiping:!0,noSwipingClass:"swiper-no-swiping",slideClass:"swiper-slide",slideActiveClass:"swiper-slide-active",slideVisibleClass:"swiper-slide-visible",slideDuplicateClass:"swiper-slide-duplicate",slideNextClass:"swiper-slide-next",slidePrevClass:"swiper-slide-prev",wrapperClass:"swiper-wrapper",bulletClass:"swiper-pagination-bullet",bulletActiveClass:"swiper-pagination-bullet-active",buttonDisabledClass:"swiper-button-disabled",paginationHiddenClass:"swiper-pagination-hidden",observer:!1,observeParents:!1,a11y:!1,prevSlideMessage:"Previous slide",nextSlideMessage:"Next slide",firstSlideMessage:"This is the first slide",lastSlideMessage:"This is the last slide",paginationBulletMessage:"Go to slide {{index}}",runCallbacksOnInit:!0},v=r&&r.virtualTranslate;r=r||{};var _={};for(var w in r)if("object"!=typeof r[w]||(r[w].nodeType||r[w]===window||r[w]===document||"undefined"!=typeof i&&r[w]instanceof i||"undefined"!=typeof jQuery&&r[w]instanceof jQuery))_[w]=r[w];else{_[w]={};for(var T in r[w])_[w][T]=r[w][T]}for(var b in g)if("undefined"==typeof r[b])r[b]=g[b];else if("object"==typeof r[b])for(var S in g[b])"undefined"==typeof r[b][S]&&(r[b][S]=g[b][S]);var y=this;if(y.params=r,y.originalParams=_,y.classNames=[],"undefined"!=typeof t&&"undefined"!=typeof i&&(t=i),("undefined"!=typeof t||(t="undefined"==typeof i?window.Dom7||window.Zepto||window.jQuery:i))&&(y.$=t,y.currentBreakpoint=void 0,y.getActiveBreakpoint=function(){if(!y.params.breakpoints)return!1;var e,t=!1,n=[];for(e in y.params.breakpoints)y.params.breakpoints.hasOwnProperty(e)&&n.push(e);n.sort(function(e,t){return parseInt(e,10)>parseInt(t,10)});for(var i=0;i<n.length;i++)e=n[i],e>=window.innerWidth&&!t&&(t=e);return t||"max"},y.setBreakpoint=function(){var e=y.getActiveBreakpoint();if(e&&y.currentBreakpoint!==e){var t=e in y.params.breakpoints?y.params.breakpoints[e]:y.originalParams;for(var n in t)y.params[n]=t[n];y.currentBreakpoint=e}},y.params.breakpoints&&y.setBreakpoint(),y.container=t(e),0!==y.container.length)){if(y.container.length>1)return void y.container.each(function(){new n(this,r)});y.container[0].swiper=y,y.container.data("swiper",y),y.classNames.push("swiper-container-"+y.params.direction),y.params.freeMode&&y.classNames.push("swiper-container-free-mode"),y.support.flexbox||(y.classNames.push("swiper-container-no-flexbox"),y.params.slidesPerColumn=1),y.params.autoHeight&&y.classNames.push("swiper-container-autoheight"),(y.params.parallax||y.params.watchSlidesVisibility)&&(y.params.watchSlidesProgress=!0),["cube","coverflow"].indexOf(y.params.effect)>=0&&(y.support.transforms3d?(y.params.watchSlidesProgress=!0,y.classNames.push("swiper-container-3d")):y.params.effect="slide"),"slide"!==y.params.effect&&y.classNames.push("swiper-container-"+y.params.effect),"cube"===y.params.effect&&(y.params.resistanceRatio=0,y.params.slidesPerView=1,y.params.slidesPerColumn=1,y.params.slidesPerGroup=1,y.params.centeredSlides=!1,y.params.spaceBetween=0,y.params.virtualTranslate=!0,y.params.setWrapperSize=!1),"fade"===y.params.effect&&(y.params.slidesPerView=1,y.params.slidesPerColumn=1,y.params.slidesPerGroup=1,y.params.watchSlidesProgress=!0,y.params.spaceBetween=0,"undefined"==typeof 
v&&(y.params.virtualTranslate=!0)),y.params.grabCursor&&y.support.touch&&(y.params.grabCursor=!1),y.wrapper=y.container.children("."+y.params.wrapperClass),y.params.pagination&&(y.paginationContainer=t(y.params.pagination),y.params.paginationClickable&&y.paginationContainer.addClass("swiper-pagination-clickable")),y.rtl=s()&&("rtl"===y.container[0].dir.toLowerCase()||"rtl"===y.container.css("direction")),y.rtl&&y.classNames.push("swiper-container-rtl"),y.rtl&&(y.wrongRTL="-webkit-box"===y.wrapper.css("display")),y.params.slidesPerColumn>1&&y.classNames.push("swiper-container-multirow"),y.device.android&&y.classNames.push("swiper-container-android"),y.container.addClass(y.classNames.join(" ")),y.translate=0,y.progress=0,y.velocity=0,y.lockSwipeToNext=function(){y.params.allowSwipeToNext=!1},y.lockSwipeToPrev=function(){y.params.allowSwipeToPrev=!1},y.lockSwipes=function(){y.params.allowSwipeToNext=y.params.allowSwipeToPrev=!1},y.unlockSwipeToNext=function(){y.params.allowSwipeToNext=!0},y.unlockSwipeToPrev=function(){y.params.allowSwipeToPrev=!0},y.unlockSwipes=function(){y.params.allowSwipeToNext=y.params.allowSwipeToPrev=!0},y.params.grabCursor&&(y.container[0].style.cursor="move",y.container[0].style.cursor="-webkit-grab",y.container[0].style.cursor="-moz-grab",y.container[0].style.cursor="grab"),y.imagesToLoad=[],y.imagesLoaded=0,y.loadImage=function(e,t,n,i,r){function a(){r&&r()}var o;e.complete&&i?a():t?(o=new window.Image,o.onload=a,o.onerror=a,n&&(o.srcset=n),t&&(o.src=t)):a()},y.preloadImages=function(){function e(){"undefined"!=typeof y&&null!==y&&(void 0!==y.imagesLoaded&&y.imagesLoaded++,y.imagesLoaded===y.imagesToLoad.length&&(y.params.updateOnImagesReady&&y.update(),y.emit("onImagesReady",y)))}y.imagesToLoad=y.container.find("img");for(var t=0;t<y.imagesToLoad.length;t++)y.loadImage(y.imagesToLoad[t],y.imagesToLoad[t].currentSrc||y.imagesToLoad[t].getAttribute("src"),y.imagesToLoad[t].srcset||y.imagesToLoad[t].getAttribute("srcset"),!0,e)},y.autoplayTimeoutId=void 0,y.autoplaying=!1,y.autoplayPaused=!1,y.startAutoplay=function(){return"undefined"!=typeof y.autoplayTimeoutId?!1:y.params.autoplay?y.autoplaying?!1:(y.autoplaying=!0,y.emit("onAutoplayStart",y),void c()):!1},y.stopAutoplay=function(e){y.autoplayTimeoutId&&(y.autoplayTimeoutId&&clearTimeout(y.autoplayTimeoutId),y.autoplaying=!1,y.autoplayTimeoutId=void 0,y.emit("onAutoplayStop",y))},y.pauseAutoplay=function(e){y.autoplayPaused||(y.autoplayTimeoutId&&clearTimeout(y.autoplayTimeoutId),y.autoplayPaused=!0,0===e?(y.autoplayPaused=!1,c()):y.wrapper.transitionEnd(function(){y&&(y.autoplayPaused=!1,y.autoplaying?c():y.stopAutoplay())}))},y.minTranslate=function(){return-y.snapGrid[0]},y.maxTranslate=function(){return-y.snapGrid[y.snapGrid.length-1]},y.updateAutoHeight=function(){var e=y.slides.eq(y.activeIndex)[0].offsetHeight;e&&y.wrapper.css("height",y.slides.eq(y.activeIndex)[0].offsetHeight+"px")},y.updateContainerSize=function(){var e,t;e="undefined"!=typeof y.params.width?y.params.width:y.container[0].clientWidth,t="undefined"!=typeof y.params.height?y.params.height:y.container[0].clientHeight,0===e&&s()||0===t&&!s()||(e=e-parseInt(y.container.css("padding-left"),10)-parseInt(y.container.css("padding-right"),10),t=t-parseInt(y.container.css("padding-top"),10)-parseInt(y.container.css("padding-bottom"),10),y.width=e,y.height=t,y.size=s()?y.width:y.height)},y.updateSlidesSize=function(){y.slides=y.wrapper.children("."+y.params.slideClass),y.snapGrid=[],y.slidesGrid=[],y.slidesSizesGrid=[];var 
e,t=y.params.spaceBetween,n=-y.params.slidesOffsetBefore,i=0,r=0;"string"==typeof t&&t.indexOf("%")>=0&&(t=parseFloat(t.replace("%",""))/100*y.size),y.virtualSize=-t,y.rtl?y.slides.css({marginLeft:"",marginTop:""}):y.slides.css({marginRight:"",marginBottom:""});var a;y.params.slidesPerColumn>1&&(a=Math.floor(y.slides.length/y.params.slidesPerColumn)===y.slides.length/y.params.slidesPerColumn?y.slides.length:Math.ceil(y.slides.length/y.params.slidesPerColumn)*y.params.slidesPerColumn,"auto"!==y.params.slidesPerView&&"row"===y.params.slidesPerColumnFill&&(a=Math.max(a,y.params.slidesPerView*y.params.slidesPerColumn)));var o,c=y.params.slidesPerColumn,d=a/c,u=d-(y.params.slidesPerColumn*d-y.slides.length);for(e=0;e<y.slides.length;e++){o=0;var p=y.slides.eq(e);if(y.params.slidesPerColumn>1){var h,f,m;"column"===y.params.slidesPerColumnFill?(f=Math.floor(e/c),m=e-f*c,(f>u||f===u&&m===c-1)&&++m>=c&&(m=0,f++),h=f+m*a/c,p.css({"-webkit-box-ordinal-group":h,"-moz-box-ordinal-group":h,"-ms-flex-order":h,"-webkit-order":h,order:h})):(m=Math.floor(e/d),f=e-m*d),p.css({"margin-top":0!==m&&y.params.spaceBetween&&y.params.spaceBetween+"px"}).attr("data-swiper-column",f).attr("data-swiper-row",m)}"none"!==p.css("display")&&("auto"===y.params.slidesPerView?(o=s()?p.outerWidth(!0):p.outerHeight(!0),y.params.roundLengths&&(o=l(o))):(o=(y.size-(y.params.slidesPerView-1)*t)/y.params.slidesPerView,y.params.roundLengths&&(o=l(o)),s()?y.slides[e].style.width=o+"px":y.slides[e].style.height=o+"px"),y.slides[e].swiperSlideSize=o,y.slidesSizesGrid.push(o),y.params.centeredSlides?(n=n+o/2+i/2+t,0===e&&(n=n-y.size/2-t),Math.abs(n)<.001&&(n=0),r%y.params.slidesPerGroup===0&&y.snapGrid.push(n),y.slidesGrid.push(n)):(r%y.params.slidesPerGroup===0&&y.snapGrid.push(n),y.slidesGrid.push(n),n=n+o+t),y.virtualSize+=o+t,i=o,r++)}y.virtualSize=Math.max(y.virtualSize,y.size)+y.params.slidesOffsetAfter;var g;if(y.rtl&&y.wrongRTL&&("slide"===y.params.effect||"coverflow"===y.params.effect)&&y.wrapper.css({width:y.virtualSize+y.params.spaceBetween+"px"}),(!y.support.flexbox||y.params.setWrapperSize)&&(s()?y.wrapper.css({width:y.virtualSize+y.params.spaceBetween+"px"}):y.wrapper.css({height:y.virtualSize+y.params.spaceBetween+"px"})),y.params.slidesPerColumn>1&&(y.virtualSize=(o+y.params.spaceBetween)*a,y.virtualSize=Math.ceil(y.virtualSize/y.params.slidesPerColumn)-y.params.spaceBetween,y.wrapper.css({width:y.virtualSize+y.params.spaceBetween+"px"}),y.params.centeredSlides)){for(g=[],e=0;e<y.snapGrid.length;e++)y.snapGrid[e]<y.virtualSize+y.snapGrid[0]&&g.push(y.snapGrid[e]);y.snapGrid=g}if(!y.params.centeredSlides){for(g=[],e=0;e<y.snapGrid.length;e++)y.snapGrid[e]<=y.virtualSize-y.size&&g.push(y.snapGrid[e]);y.snapGrid=g,Math.floor(y.virtualSize-y.size)>Math.floor(y.snapGrid[y.snapGrid.length-1])&&y.snapGrid.push(y.virtualSize-y.size)}0===y.snapGrid.length&&(y.snapGrid=[0]),0!==y.params.spaceBetween&&(s()?y.rtl?y.slides.css({marginLeft:t+"px"}):y.slides.css({marginRight:t+"px"}):y.slides.css({marginBottom:t+"px"})),y.params.watchSlidesProgress&&y.updateSlidesOffset()},y.updateSlidesOffset=function(){for(var e=0;e<y.slides.length;e++)y.slides[e].swiperSlideOffset=s()?y.slides[e].offsetLeft:y.slides[e].offsetTop},y.updateSlidesProgress=function(e){if("undefined"==typeof e&&(e=y.translate||0),0!==y.slides.length){"undefined"==typeof y.slides[0].swiperSlideOffset&&y.updateSlidesOffset();var t=-e;y.rtl&&(t=e),y.slides.removeClass(y.params.slideVisibleClass);for(var n=0;n<y.slides.length;n++){var 
i=y.slides[n],r=(t-i.swiperSlideOffset)/(i.swiperSlideSize+y.params.spaceBetween);if(y.params.watchSlidesVisibility){var a=-(t-i.swiperSlideOffset),o=a+y.slidesSizesGrid[n],s=a>=0&&a<y.size||o>0&&o<=y.size||0>=a&&o>=y.size;s&&y.slides.eq(n).addClass(y.params.slideVisibleClass)}i.progress=y.rtl?-r:r}}},y.updateProgress=function(e){"undefined"==typeof e&&(e=y.translate||0);var t=y.maxTranslate()-y.minTranslate(),n=y.isBeginning,i=y.isEnd;0===t?(y.progress=0,y.isBeginning=y.isEnd=!0):(y.progress=(e-y.minTranslate())/t,y.isBeginning=y.progress<=0,y.isEnd=y.progress>=1),y.isBeginning&&!n&&y.emit("onReachBeginning",y),y.isEnd&&!i&&y.emit("onReachEnd",y),y.params.watchSlidesProgress&&y.updateSlidesProgress(e),y.emit("onProgress",y,y.progress)},y.updateActiveIndex=function(){var e,t,n,i=y.rtl?y.translate:-y.translate;for(t=0;t<y.slidesGrid.length;t++)"undefined"!=typeof y.slidesGrid[t+1]?i>=y.slidesGrid[t]&&i<y.slidesGrid[t+1]-(y.slidesGrid[t+1]-y.slidesGrid[t])/2?e=t:i>=y.slidesGrid[t]&&i<y.slidesGrid[t+1]&&(e=t+1):i>=y.slidesGrid[t]&&(e=t);(0>e||"undefined"==typeof e)&&(e=0),n=Math.floor(e/y.params.slidesPerGroup),n>=y.snapGrid.length&&(n=y.snapGrid.length-1),e!==y.activeIndex&&(y.snapIndex=n,y.previousIndex=y.activeIndex,y.activeIndex=e,y.updateClasses())},y.updateClasses=function(){y.slides.removeClass(y.params.slideActiveClass+" "+y.params.slideNextClass+" "+y.params.slidePrevClass);var e=y.slides.eq(y.activeIndex);if(e.addClass(y.params.slideActiveClass),e.next("."+y.params.slideClass).addClass(y.params.slideNextClass),e.prev("."+y.params.slideClass).addClass(y.params.slidePrevClass),y.bullets&&y.bullets.length>0){y.bullets.removeClass(y.params.bulletActiveClass);var n;y.params.loop?(n=Math.ceil(y.activeIndex-y.loopedSlides)/y.params.slidesPerGroup,n>y.slides.length-1-2*y.loopedSlides&&(n-=y.slides.length-2*y.loopedSlides),n>y.bullets.length-1&&(n-=y.bullets.length)):n="undefined"!=typeof y.snapIndex?y.snapIndex:y.activeIndex||0,y.paginationContainer.length>1?y.bullets.each(function(){t(this).index()===n&&t(this).addClass(y.params.bulletActiveClass)}):y.bullets.eq(n).addClass(y.params.bulletActiveClass)}y.params.loop||(y.params.prevButton&&(y.isBeginning?(t(y.params.prevButton).addClass(y.params.buttonDisabledClass),y.params.a11y&&y.a11y&&y.a11y.disable(t(y.params.prevButton))):(t(y.params.prevButton).removeClass(y.params.buttonDisabledClass),y.params.a11y&&y.a11y&&y.a11y.enable(t(y.params.prevButton)))),y.params.nextButton&&(y.isEnd?(t(y.params.nextButton).addClass(y.params.buttonDisabledClass),y.params.a11y&&y.a11y&&y.a11y.disable(t(y.params.nextButton))):(t(y.params.nextButton).removeClass(y.params.buttonDisabledClass),y.params.a11y&&y.a11y&&y.a11y.enable(t(y.params.nextButton)))))},y.updatePagination=function(){if(y.params.pagination&&y.paginationContainer&&y.paginationContainer.length>0){for(var e="",t=y.params.loop?Math.ceil((y.slides.length-2*y.loopedSlides)/y.params.slidesPerGroup):y.snapGrid.length,n=0;t>n;n++)e+=y.params.paginationBulletRender?y.params.paginationBulletRender(n,y.params.bulletClass):"<"+y.params.paginationElement+' class="'+y.params.bulletClass+'"></'+y.params.paginationElement+">";y.paginationContainer.html(e),y.bullets=y.paginationContainer.find("."+y.params.bulletClass),y.params.paginationClickable&&y.params.a11y&&y.a11y&&y.a11y.initPagination()}},y.update=function(e){function 
t(){i=Math.min(Math.max(y.translate,y.maxTranslate()),y.minTranslate()),y.setWrapperTranslate(i),y.updateActiveIndex(),y.updateClasses()}if(y.updateContainerSize(),y.updateSlidesSize(),y.updateProgress(),y.updatePagination(),y.updateClasses(),y.params.scrollbar&&y.scrollbar&&y.scrollbar.set(),e){var n,i;y.controller&&y.controller.spline&&(y.controller.spline=void 0),y.params.freeMode?(t(),y.params.autoHeight&&y.updateAutoHeight()):(n=("auto"===y.params.slidesPerView||y.params.slidesPerView>1)&&y.isEnd&&!y.params.centeredSlides?y.slideTo(y.slides.length-1,0,!1,!0):y.slideTo(y.activeIndex,0,!1,!0),n||t())}else y.params.autoHeight&&y.updateAutoHeight()},y.onResize=function(e){y.params.breakpoints&&y.setBreakpoint();var t=y.params.allowSwipeToPrev,n=y.params.allowSwipeToNext;if(y.params.allowSwipeToPrev=y.params.allowSwipeToNext=!0,y.updateContainerSize(),y.updateSlidesSize(),("auto"===y.params.slidesPerView||y.params.freeMode||e)&&y.updatePagination(),y.params.scrollbar&&y.scrollbar&&y.scrollbar.set(),y.controller&&y.controller.spline&&(y.controller.spline=void 0),y.params.freeMode){var i=Math.min(Math.max(y.translate,y.maxTranslate()),y.minTranslate());y.setWrapperTranslate(i),y.updateActiveIndex(),y.updateClasses(),y.params.autoHeight&&y.updateAutoHeight()}else y.updateClasses(),("auto"===y.params.slidesPerView||y.params.slidesPerView>1)&&y.isEnd&&!y.params.centeredSlides?y.slideTo(y.slides.length-1,0,!1,!0):y.slideTo(y.activeIndex,0,!1,!0);y.params.allowSwipeToPrev=t,y.params.allowSwipeToNext=n};var E=["mousedown","mousemove","mouseup"];window.navigator.pointerEnabled?E=["pointerdown","pointermove","pointerup"]:window.navigator.msPointerEnabled&&(E=["MSPointerDown","MSPointerMove","MSPointerUp"]),y.touchEvents={start:y.support.touch||!y.params.simulateTouch?"touchstart":E[0],move:y.support.touch||!y.params.simulateTouch?"touchmove":E[1],end:y.support.touch||!y.params.simulateTouch?"touchend":E[2]},(window.navigator.pointerEnabled||window.navigator.msPointerEnabled)&&("container"===y.params.touchEventsTarget?y.container:y.wrapper).addClass("swiper-wp8-"+y.params.direction),y.initEvents=function(e){var 
n=e?"off":"on",i=e?"removeEventListener":"addEventListener",a="container"===y.params.touchEventsTarget?y.container[0]:y.wrapper[0],o=y.support.touch?a:document,s=y.params.nested?!0:!1;y.browser.ie?(a[i](y.touchEvents.start,y.onTouchStart,!1),o[i](y.touchEvents.move,y.onTouchMove,s),o[i](y.touchEvents.end,y.onTouchEnd,!1)):(y.support.touch&&(a[i](y.touchEvents.start,y.onTouchStart,!1),a[i](y.touchEvents.move,y.onTouchMove,s),a[i](y.touchEvents.end,y.onTouchEnd,!1)),!r.simulateTouch||y.device.ios||y.device.android||(a[i]("mousedown",y.onTouchStart,!1),document[i]("mousemove",y.onTouchMove,s),document[i]("mouseup",y.onTouchEnd,!1))),window[i]("resize",y.onResize),y.params.nextButton&&(t(y.params.nextButton)[n]("click",y.onClickNext),y.params.a11y&&y.a11y&&t(y.params.nextButton)[n]("keydown",y.a11y.onEnterKey)),y.params.prevButton&&(t(y.params.prevButton)[n]("click",y.onClickPrev),y.params.a11y&&y.a11y&&t(y.params.prevButton)[n]("keydown",y.a11y.onEnterKey)),y.params.pagination&&y.params.paginationClickable&&(t(y.paginationContainer)[n]("click","."+y.params.bulletClass,y.onClickIndex),y.params.a11y&&y.a11y&&t(y.paginationContainer)[n]("keydown","."+y.params.bulletClass,y.a11y.onEnterKey)),(y.params.preventClicks||y.params.preventClicksPropagation)&&a[i]("click",y.preventClicks,!0)},y.attachEvents=function(e){y.initEvents()},y.detachEvents=function(){y.initEvents(!0)},y.allowClick=!0,y.preventClicks=function(e){y.allowClick||(y.params.preventClicks&&e.preventDefault(),y.params.preventClicksPropagation&&y.animating&&(e.stopPropagation(),e.stopImmediatePropagation()))},y.onClickNext=function(e){e.preventDefault(),(!y.isEnd||y.params.loop)&&y.slideNext()},y.onClickPrev=function(e){e.preventDefault(),(!y.isBeginning||y.params.loop)&&y.slidePrev()},y.onClickIndex=function(e){e.preventDefault();var n=t(this).index()*y.params.slidesPerGroup;y.params.loop&&(n+=y.loopedSlides),y.slideTo(n)},y.updateClickedSlide=function(e){var n=d(e,"."+y.params.slideClass),i=!1;if(n)for(var r=0;r<y.slides.length;r++)y.slides[r]===n&&(i=!0);if(!n||!i)return y.clickedSlide=void 0,void(y.clickedIndex=void 0);if(y.clickedSlide=n,y.clickedIndex=t(n).index(),y.params.slideToClickedSlide&&void 0!==y.clickedIndex&&y.clickedIndex!==y.activeIndex){var a,o=y.clickedIndex;if(y.params.loop){if(y.animating)return;a=t(y.clickedSlide).attr("data-swiper-slide-index"),y.params.centeredSlides?o<y.loopedSlides-y.params.slidesPerView/2||o>y.slides.length-y.loopedSlides+y.params.slidesPerView/2?(y.fixLoop(),o=y.wrapper.children("."+y.params.slideClass+'[data-swiper-slide-index="'+a+'"]:not(.swiper-slide-duplicate)').eq(0).index(),setTimeout(function(){y.slideTo(o)},0)):y.slideTo(o):o>y.slides.length-y.params.slidesPerView?(y.fixLoop(),o=y.wrapper.children("."+y.params.slideClass+'[data-swiper-slide-index="'+a+'"]:not(.swiper-slide-duplicate)').eq(0).index(),setTimeout(function(){y.slideTo(o)},0)):y.slideTo(o)}else y.slideTo(o)}};var x,D,M,L,C,P,I,k,N,z,O="input, select, textarea, button",A=Date.now(),G=[];y.animating=!1,y.touches={startX:0,startY:0,currentX:0,currentY:0,diff:0};var V,R;if(y.onTouchStart=function(e){if(e.originalEvent&&(e=e.originalEvent),V="touchstart"===e.type,V||!("which"in e)||3!==e.which){if(y.params.noSwiping&&d(e,"."+y.params.noSwipingClass))return void(y.allowClick=!0);if(!y.params.swipeHandler||d(e,y.params.swipeHandler)){var 
n=y.touches.currentX="touchstart"===e.type?e.targetTouches[0].pageX:e.pageX,i=y.touches.currentY="touchstart"===e.type?e.targetTouches[0].pageY:e.pageY;if(!(y.device.ios&&y.params.iOSEdgeSwipeDetection&&n<=y.params.iOSEdgeSwipeThreshold)){if(x=!0,D=!1,M=!0,C=void 0,R=void 0,y.touches.startX=n,y.touches.startY=i,L=Date.now(),y.allowClick=!0,y.updateContainerSize(),y.swipeDirection=void 0,y.params.threshold>0&&(k=!1),"touchstart"!==e.type){var r=!0;t(e.target).is(O)&&(r=!1),document.activeElement&&t(document.activeElement).is(O)&&document.activeElement.blur(),r&&e.preventDefault()}y.emit("onTouchStart",y,e)}}}},y.onTouchMove=function(e){if(e.originalEvent&&(e=e.originalEvent),!(V&&"mousemove"===e.type||e.preventedByNestedSwiper)){if(y.params.onlyExternal)return y.allowClick=!1,void(x&&(y.touches.startX=y.touches.currentX="touchmove"===e.type?e.targetTouches[0].pageX:e.pageX,y.touches.startY=y.touches.currentY="touchmove"===e.type?e.targetTouches[0].pageY:e.pageY,L=Date.now()));if(V&&document.activeElement&&e.target===document.activeElement&&t(e.target).is(O))return D=!0,void(y.allowClick=!1);if(M&&y.emit("onTouchMove",y,e),!(e.targetTouches&&e.targetTouches.length>1)){if(y.touches.currentX="touchmove"===e.type?e.targetTouches[0].pageX:e.pageX,y.touches.currentY="touchmove"===e.type?e.targetTouches[0].pageY:e.pageY,"undefined"==typeof C){var n=180*Math.atan2(Math.abs(y.touches.currentY-y.touches.startY),Math.abs(y.touches.currentX-y.touches.startX))/Math.PI;C=s()?n>y.params.touchAngle:90-n>y.params.touchAngle}if(C&&y.emit("onTouchMoveOpposite",y,e),"undefined"==typeof R&&y.browser.ieTouch&&(y.touches.currentX!==y.touches.startX||y.touches.currentY!==y.touches.startY)&&(R=!0),x){if(C)return void(x=!1);if(R||!y.browser.ieTouch){y.allowClick=!1,y.emit("onSliderMove",y,e),e.preventDefault(),y.params.touchMoveStopPropagation&&!y.params.nested&&e.stopPropagation(),D||(r.loop&&y.fixLoop(),I=y.getWrapperTranslate(),y.setWrapperTransition(0),y.animating&&y.wrapper.trigger("webkitTransitionEnd transitionend oTransitionEnd MSTransitionEnd msTransitionEnd"),y.params.autoplay&&y.autoplaying&&(y.params.autoplayDisableOnInteraction?y.stopAutoplay():y.pauseAutoplay()),z=!1,y.params.grabCursor&&(y.container[0].style.cursor="move",y.container[0].style.cursor="-webkit-grabbing",y.container[0].style.cursor="-moz-grabbing",y.container[0].style.cursor="grabbing")),D=!0;var i=y.touches.diff=s()?y.touches.currentX-y.touches.startX:y.touches.currentY-y.touches.startY;i*=y.params.touchRatio,y.rtl&&(i=-i),y.swipeDirection=i>0?"prev":"next",P=i+I;var a=!0;if(i>0&&P>y.minTranslate()?(a=!1,y.params.resistance&&(P=y.minTranslate()-1+Math.pow(-y.minTranslate()+I+i,y.params.resistanceRatio))):0>i&&P<y.maxTranslate()&&(a=!1,y.params.resistance&&(P=y.maxTranslate()+1-Math.pow(y.maxTranslate()-I-i,y.params.resistanceRatio))),a&&(e.preventedByNestedSwiper=!0),!y.params.allowSwipeToNext&&"next"===y.swipeDirection&&I>P&&(P=I),!y.params.allowSwipeToPrev&&"prev"===y.swipeDirection&&P>I&&(P=I),y.params.followFinger){if(y.params.threshold>0){if(!(Math.abs(i)>y.params.threshold||k))return void(P=I);if(!k)return k=!0,y.touches.startX=y.touches.currentX,y.touches.startY=y.touches.currentY,P=I,void(y.touches.diff=s()?y.touches.currentX-y.touches.startX:y.touches.currentY-y.touches.startY)}(y.params.freeMode||y.params.watchSlidesProgress)&&y.updateActiveIndex(),y.params.freeMode&&(0===G.length&&G.push({position:y.touches[s()?"startX":"startY"],time:L}),G.push({position:y.touches[s()?"currentX":"currentY"],time:(new 
window.Date).getTime()})),y.updateProgress(P),y.setWrapperTranslate(P)}}}}}},y.onTouchEnd=function(e){if(e.originalEvent&&(e=e.originalEvent),M&&y.emit("onTouchEnd",y,e),M=!1,x){y.params.grabCursor&&D&&x&&(y.container[0].style.cursor="move",y.container[0].style.cursor="-webkit-grab",y.container[0].style.cursor="-moz-grab",y.container[0].style.cursor="grab");var n=Date.now(),i=n-L;if(y.allowClick&&(y.updateClickedSlide(e),y.emit("onTap",y,e),300>i&&n-A>300&&(N&&clearTimeout(N),N=setTimeout(function(){y&&(y.params.paginationHide&&y.paginationContainer.length>0&&!t(e.target).hasClass(y.params.bulletClass)&&y.paginationContainer.toggleClass(y.params.paginationHiddenClass),y.emit("onClick",y,e))},300)),300>i&&300>n-A&&(N&&clearTimeout(N),y.emit("onDoubleTap",y,e))),A=Date.now(),setTimeout(function(){y&&(y.allowClick=!0)},0),!x||!D||!y.swipeDirection||0===y.touches.diff||P===I)return void(x=D=!1);x=D=!1;var r;if(r=y.params.followFinger?y.rtl?y.translate:-y.translate:-P,y.params.freeMode){if(r<-y.minTranslate())return void y.slideTo(y.activeIndex);if(r>-y.maxTranslate())return void(y.slides.length<y.snapGrid.length?y.slideTo(y.snapGrid.length-1):y.slideTo(y.slides.length-1));if(y.params.freeModeMomentum){if(G.length>1){var a=G.pop(),o=G.pop(),s=a.position-o.position,l=a.time-o.time;y.velocity=s/l,y.velocity=y.velocity/2,Math.abs(y.velocity)<y.params.freeModeMinimumVelocity&&(y.velocity=0),(l>150||(new window.Date).getTime()-a.time>300)&&(y.velocity=0)}else y.velocity=0;G.length=0;var c=1e3*y.params.freeModeMomentumRatio,d=y.velocity*c,u=y.translate+d;y.rtl&&(u=-u);var p,h=!1,f=20*Math.abs(y.velocity)*y.params.freeModeMomentumBounceRatio;if(u<y.maxTranslate())y.params.freeModeMomentumBounce?(u+y.maxTranslate()<-f&&(u=y.maxTranslate()-f),p=y.maxTranslate(),h=!0,z=!0):u=y.maxTranslate();else if(u>y.minTranslate())y.params.freeModeMomentumBounce?(u-y.minTranslate()>f&&(u=y.minTranslate()+f),p=y.minTranslate(),h=!0,z=!0):u=y.minTranslate();else if(y.params.freeModeSticky){var m,g=0;for(g=0;g<y.snapGrid.length;g+=1)if(y.snapGrid[g]>-u){m=g;break}u=Math.abs(y.snapGrid[m]-u)<Math.abs(y.snapGrid[m-1]-u)||"next"===y.swipeDirection?y.snapGrid[m]:y.snapGrid[m-1],y.rtl||(u=-u)}if(0!==y.velocity)c=y.rtl?Math.abs((-u-y.translate)/y.velocity):Math.abs((u-y.translate)/y.velocity);else if(y.params.freeModeSticky)return void y.slideReset();y.params.freeModeMomentumBounce&&h?(y.updateProgress(p),y.setWrapperTransition(c),y.setWrapperTranslate(u),y.onTransitionStart(),y.animating=!0,y.wrapper.transitionEnd(function(){y&&z&&(y.emit("onMomentumBounce",y),y.setWrapperTransition(y.params.speed),y.setWrapperTranslate(p),y.wrapper.transitionEnd(function(){y&&y.onTransitionEnd()}))})):y.velocity?(y.updateProgress(u),y.setWrapperTransition(c),y.setWrapperTranslate(u),y.onTransitionStart(),y.animating||(y.animating=!0,y.wrapper.transitionEnd(function(){y&&y.onTransitionEnd()}))):y.updateProgress(u),y.updateActiveIndex()}return void((!y.params.freeModeMomentum||i>=y.params.longSwipesMs)&&(y.updateProgress(),y.updateActiveIndex()))}var v,_=0,w=y.slidesSizesGrid[0];for(v=0;v<y.slidesGrid.length;v+=y.params.slidesPerGroup)"undefined"!=typeof y.slidesGrid[v+y.params.slidesPerGroup]?r>=y.slidesGrid[v]&&r<y.slidesGrid[v+y.params.slidesPerGroup]&&(_=v,w=y.slidesGrid[v+y.params.slidesPerGroup]-y.slidesGrid[v]):r>=y.slidesGrid[v]&&(_=v,w=y.slidesGrid[y.slidesGrid.length-1]-y.slidesGrid[y.slidesGrid.length-2]);var T=(r-y.slidesGrid[_])/w;if(i>y.params.longSwipesMs){if(!y.params.longSwipes)return void 
y.slideTo(y.activeIndex);"next"===y.swipeDirection&&(T>=y.params.longSwipesRatio?y.slideTo(_+y.params.slidesPerGroup):y.slideTo(_)),"prev"===y.swipeDirection&&(T>1-y.params.longSwipesRatio?y.slideTo(_+y.params.slidesPerGroup):y.slideTo(_))}else{if(!y.params.shortSwipes)return void y.slideTo(y.activeIndex);"next"===y.swipeDirection&&y.slideTo(_+y.params.slidesPerGroup),"prev"===y.swipeDirection&&y.slideTo(_)}}},y._slideTo=function(e,t){return y.slideTo(e,t,!0,!0)},y.slideTo=function(e,t,n,i){"undefined"==typeof n&&(n=!0),"undefined"==typeof e&&(e=0),0>e&&(e=0),y.snapIndex=Math.floor(e/y.params.slidesPerGroup),y.snapIndex>=y.snapGrid.length&&(y.snapIndex=y.snapGrid.length-1);var r=-y.snapGrid[y.snapIndex];y.params.autoplay&&y.autoplaying&&(i||!y.params.autoplayDisableOnInteraction?y.pauseAutoplay(t):y.stopAutoplay()),y.updateProgress(r);for(var a=0;a<y.slidesGrid.length;a++)-Math.floor(100*r)>=Math.floor(100*y.slidesGrid[a])&&(e=a);return!y.params.allowSwipeToNext&&r<y.translate&&r<y.minTranslate()?!1:!y.params.allowSwipeToPrev&&r>y.translate&&r>y.maxTranslate()&&(y.activeIndex||0)!==e?!1:("undefined"==typeof t&&(t=y.params.speed),y.previousIndex=y.activeIndex||0,y.activeIndex=e,y.rtl&&-r===y.translate||!y.rtl&&r===y.translate?(y.params.autoHeight&&y.updateAutoHeight(),y.updateClasses(),"slide"!==y.params.effect&&y.setWrapperTranslate(r),!1):(y.updateClasses(),y.onTransitionStart(n),0===t?(y.setWrapperTranslate(r),y.setWrapperTransition(0),y.onTransitionEnd(n)):(y.setWrapperTranslate(r),y.setWrapperTransition(t),y.animating||(y.animating=!0,y.wrapper.transitionEnd(function(){y&&y.onTransitionEnd(n)}))),!0))},y.onTransitionStart=function(e){"undefined"==typeof e&&(e=!0),y.params.autoHeight&&y.updateAutoHeight(),y.lazy&&y.lazy.onTransitionStart(),e&&(y.emit("onTransitionStart",y),y.activeIndex!==y.previousIndex&&(y.emit("onSlideChangeStart",y),a.$emit("$ionicSlides.slideChangeStart",{slider:y,activeIndex:y.getSlideDataIndex(y.activeIndex),previousIndex:y.getSlideDataIndex(y.previousIndex)}),y.activeIndex>y.previousIndex?y.emit("onSlideNextStart",y):y.emit("onSlidePrevStart",y)))},y.onTransitionEnd=function(e){y.animating=!1,y.setWrapperTransition(0),"undefined"==typeof e&&(e=!0),y.lazy&&y.lazy.onTransitionEnd(),e&&(y.emit("onTransitionEnd",y),y.activeIndex!==y.previousIndex&&(y.emit("onSlideChangeEnd",y),a.$emit("$ionicSlides.slideChangeEnd",{slider:y,activeIndex:y.getSlideDataIndex(y.activeIndex),previousIndex:y.getSlideDataIndex(y.previousIndex)}),y.activeIndex>y.previousIndex?y.emit("onSlideNextEnd",y):y.emit("onSlidePrevEnd",y))),y.params.hashnav&&y.hashnav&&y.hashnav.setHash()},y.slideNext=function(e,t,n){if(y.params.loop){if(y.animating)return!1;y.fixLoop();y.container[0].clientLeft;return y.slideTo(y.activeIndex+y.params.slidesPerGroup,t,e,n)}return y.slideTo(y.activeIndex+y.params.slidesPerGroup,t,e,n)},y._slideNext=function(e){return y.slideNext(!0,e,!0)},y.slidePrev=function(e,t,n){if(y.params.loop){if(y.animating)return!1;y.fixLoop();y.container[0].clientLeft;return y.slideTo(y.activeIndex-1,t,e,n)}return y.slideTo(y.activeIndex-1,t,e,n)},y._slidePrev=function(e){return y.slidePrev(!0,e,!0)},y.slideReset=function(e,t,n){return 
y.slideTo(y.activeIndex,t,e)},y.setWrapperTransition=function(e,t){y.wrapper.transition(e),"slide"!==y.params.effect&&y.effects[y.params.effect]&&y.effects[y.params.effect].setTransition(e),y.params.parallax&&y.parallax&&y.parallax.setTransition(e),y.params.scrollbar&&y.scrollbar&&y.scrollbar.setTransition(e),y.params.control&&y.controller&&y.controller.setTransition(e,t),y.emit("onSetTransition",y,e)},y.setWrapperTranslate=function(e,t,n){var i=0,r=0,a=0;s()?i=y.rtl?-e:e:r=e,y.params.roundLengths&&(i=l(i),r=l(r)),y.params.virtualTranslate||(y.support.transforms3d?y.wrapper.transform("translate3d("+i+"px, "+r+"px, "+a+"px)"):y.wrapper.transform("translate("+i+"px, "+r+"px)")), y.translate=s()?i:r;var o,c=y.maxTranslate()-y.minTranslate();o=0===c?0:(e-y.minTranslate())/c,o!==y.progress&&y.updateProgress(e),t&&y.updateActiveIndex(),"slide"!==y.params.effect&&y.effects[y.params.effect]&&y.effects[y.params.effect].setTranslate(y.translate),y.params.parallax&&y.parallax&&y.parallax.setTranslate(y.translate),y.params.scrollbar&&y.scrollbar&&y.scrollbar.setTranslate(y.translate),y.params.control&&y.controller&&y.controller.setTranslate(y.translate,n),y.emit("onSetTranslate",y,y.translate)},y.getTranslate=function(e,t){var n,i,r,a;return"undefined"==typeof t&&(t="x"),y.params.virtualTranslate?y.rtl?-y.translate:y.translate:(r=window.getComputedStyle(e,null),window.WebKitCSSMatrix?(i=r.transform||r.webkitTransform,i.split(",").length>6&&(i=i.split(", ").map(function(e){return e.replace(",",".")}).join(", ")),a=new window.WebKitCSSMatrix("none"===i?"":i)):(a=r.MozTransform||r.OTransform||r.MsTransform||r.msTransform||r.transform||r.getPropertyValue("transform").replace("translate(","matrix(1, 0, 0, 1,"),n=a.toString().split(",")),"x"===t&&(i=window.WebKitCSSMatrix?a.m41:16===n.length?parseFloat(n[12]):parseFloat(n[4])),"y"===t&&(i=window.WebKitCSSMatrix?a.m42:16===n.length?parseFloat(n[13]):parseFloat(n[5])),y.rtl&&i&&(i=-i),i||0)},y.getWrapperTranslate=function(e){return"undefined"==typeof e&&(e=s()?"x":"y"),y.getTranslate(y.wrapper[0],e)},y.observers=[],y.initObservers=function(){if(y.params.observeParents)for(var e=y.container.parents(),t=0;t<e.length;t++)u(e[t]);u(y.container[0],{childList:!1}),u(y.wrapper[0],{attributes:!1})},y.disconnectObservers=function(){for(var e=0;e<y.observers.length;e++)y.observers[e].disconnect();y.observers=[]},y.updateLoop=function(){var e=y.slides.eq(y.activeIndex);if(angular.element(e).hasClass(y.params.slideDuplicateClass)){for(var t=angular.element(e).attr("data-swiper-slide-index"),n=y.wrapper.children("."+y.params.slideClass),i=0;i<n.length;i++)if(!angular.element(n[i]).hasClass(y.params.slideDuplicateClass)&&angular.element(n[i]).attr("data-swiper-slide-index")===t){y.slideTo(i,0,!1,!0);break}setTimeout(function(){y.createLoop()},50)}},y.getSlideDataIndex=function(e){var t=y.slides.eq(e),n=angular.element(t).attr("data-swiper-slide-index");return parseInt(n)},y.createLoop=function(){y.wrapper.children("."+y.params.slideClass+"."+y.params.slideDuplicateClass).remove();var e=y.wrapper.children("."+y.params.slideClass);"auto"!==y.params.slidesPerView||y.params.loopedSlides||(y.params.loopedSlides=e.length),y.loopedSlides=parseInt(y.params.loopedSlides||y.params.slidesPerView,10),y.loopedSlides=y.loopedSlides+y.params.loopAdditionalSlides,y.loopedSlides>e.length&&(y.loopedSlides=e.length);var n,i,r,a=[],s=[];for(e.each(function(n,i){var 
r=t(this);n<y.loopedSlides&&s.push(i),n<e.length&&n>=e.length-y.loopedSlides&&a.push(i),r.attr("data-swiper-slide-index",n)}),n=0;n<s.length;n++)r=angular.element(s[n]).clone().addClass(y.params.slideDuplicateClass),r.removeAttr("ng-transclude"),r.removeAttr("ng-repeat"),i=angular.element(s[n]).scope(),r=o(r)(i),angular.element(y.wrapper).append(r);for(n=a.length-1;n>=0;n--)r=angular.element(a[n]).clone().addClass(y.params.slideDuplicateClass),r.removeAttr("ng-transclude"),r.removeAttr("ng-repeat"),i=angular.element(a[n]).scope(),r=o(r)(i),angular.element(y.wrapper).prepend(r)},y.destroyLoop=function(){y.wrapper.children("."+y.params.slideClass+"."+y.params.slideDuplicateClass).remove(),y.slides.removeAttr("data-swiper-slide-index")},y.fixLoop=function(){var e;y.activeIndex<y.loopedSlides?(e=y.slides.length-3*y.loopedSlides+y.activeIndex,e+=y.loopedSlides,y.slideTo(e,0,!1,!0)):("auto"===y.params.slidesPerView&&y.activeIndex>=2*y.loopedSlides||y.activeIndex>y.slides.length-2*y.params.slidesPerView)&&(e=-y.slides.length+y.activeIndex+y.loopedSlides,e+=y.loopedSlides,y.slideTo(e,0,!1,!0))},y.appendSlide=function(e){if(y.params.loop&&y.destroyLoop(),"object"==typeof e&&e.length)for(var t=0;t<e.length;t++)e[t]&&y.wrapper.append(e[t]);else y.wrapper.append(e);y.params.loop&&y.createLoop(),y.params.observer&&y.support.observer||y.update(!0)},y.prependSlide=function(e){y.params.loop&&y.destroyLoop();var t=y.activeIndex+1;if("object"==typeof e&&e.length){for(var n=0;n<e.length;n++)e[n]&&y.wrapper.prepend(e[n]);t=y.activeIndex+e.length}else y.wrapper.prepend(e);y.params.loop&&y.createLoop(),y.params.observer&&y.support.observer||y.update(!0),y.slideTo(t,0,!1)},y.removeSlide=function(e){y.params.loop&&(y.destroyLoop(),y.slides=y.wrapper.children("."+y.params.slideClass));var t,n=y.activeIndex;if("object"==typeof e&&e.length){for(var i=0;i<e.length;i++)t=e[i],y.slides[t]&&y.slides.eq(t).remove(),n>t&&n--;n=Math.max(n,0)}else t=e,y.slides[t]&&y.slides.eq(t).remove(),n>t&&n--,n=Math.max(n,0);y.params.loop&&y.createLoop(),y.params.observer&&y.support.observer||y.update(!0),y.params.loop?y.slideTo(n+y.loopedSlides,0,!1):y.slideTo(n,0,!1)},y.removeAllSlides=function(){for(var e=[],t=0;t<y.slides.length;t++)e.push(t);y.removeSlide(e)},y.effects={fade:{setTranslate:function(){for(var e=0;e<y.slides.length;e++){var t=y.slides.eq(e),n=t[0].swiperSlideOffset,i=-n;y.params.virtualTranslate||(i-=y.translate);var r=0;s()||(r=i,i=0);var a=y.params.fade.crossFade?Math.max(1-Math.abs(t[0].progress),0):1+Math.min(Math.max(t[0].progress,-1),0);t.css({opacity:a}).transform("translate3d("+i+"px, "+r+"px, 0px)")}},setTransition:function(e){if(y.slides.transition(e),y.params.virtualTranslate&&0!==e){var t=!1;y.slides.transitionEnd(function(){if(!t&&y){t=!0,y.animating=!1;for(var e=["webkitTransitionEnd","transitionend","oTransitionEnd","MSTransitionEnd","msTransitionEnd"],n=0;n<e.length;n++)y.wrapper.trigger(e[n])}})}}},cube:{setTranslate:function(){var e,n=0;y.params.cube.shadow&&(s()?(e=y.wrapper.find(".swiper-cube-shadow"),0===e.length&&(e=t('<div class="swiper-cube-shadow"></div>'),y.wrapper.append(e)),e.css({height:y.width+"px"})):(e=y.container.find(".swiper-cube-shadow"),0===e.length&&(e=t('<div class="swiper-cube-shadow"></div>'),y.container.append(e))));for(var i=0;i<y.slides.length;i++){var r=y.slides.eq(i),a=90*i,o=Math.floor(a/360);y.rtl&&(a=-a,o=Math.floor(-a/360));var 
l=Math.max(Math.min(r[0].progress,1),-1),c=0,d=0,u=0;i%4===0?(c=4*-o*y.size,u=0):(i-1)%4===0?(c=0,u=4*-o*y.size):(i-2)%4===0?(c=y.size+4*o*y.size,u=y.size):(i-3)%4===0&&(c=-y.size,u=3*y.size+4*y.size*o),y.rtl&&(c=-c),s()||(d=c,c=0);var p="rotateX("+(s()?0:-a)+"deg) rotateY("+(s()?a:0)+"deg) translate3d("+c+"px, "+d+"px, "+u+"px)";if(1>=l&&l>-1&&(n=90*i+90*l,y.rtl&&(n=90*-i-90*l)),r.transform(p),y.params.cube.slideShadows){var h=s()?r.find(".swiper-slide-shadow-left"):r.find(".swiper-slide-shadow-top"),f=s()?r.find(".swiper-slide-shadow-right"):r.find(".swiper-slide-shadow-bottom");0===h.length&&(h=t('<div class="swiper-slide-shadow-'+(s()?"left":"top")+'"></div>'),r.append(h)),0===f.length&&(f=t('<div class="swiper-slide-shadow-'+(s()?"right":"bottom")+'"></div>'),r.append(f));r[0].progress;h.length&&(h[0].style.opacity=-r[0].progress),f.length&&(f[0].style.opacity=r[0].progress)}}if(y.wrapper.css({"-webkit-transform-origin":"50% 50% -"+y.size/2+"px","-moz-transform-origin":"50% 50% -"+y.size/2+"px","-ms-transform-origin":"50% 50% -"+y.size/2+"px","transform-origin":"50% 50% -"+y.size/2+"px"}),y.params.cube.shadow)if(s())e.transform("translate3d(0px, "+(y.width/2+y.params.cube.shadowOffset)+"px, "+-y.width/2+"px) rotateX(90deg) rotateZ(0deg) scale("+y.params.cube.shadowScale+")");else{var m=Math.abs(n)-90*Math.floor(Math.abs(n)/90),g=1.5-(Math.sin(2*m*Math.PI/360)/2+Math.cos(2*m*Math.PI/360)/2),v=y.params.cube.shadowScale,_=y.params.cube.shadowScale/g,w=y.params.cube.shadowOffset;e.transform("scale3d("+v+", 1, "+_+") translate3d(0px, "+(y.height/2+w)+"px, "+-y.height/2/_+"px) rotateX(-90deg)")}var T=y.isSafari||y.isUiWebView?-y.size/2:0;y.wrapper.transform("translate3d(0px,0,"+T+"px) rotateX("+(s()?0:n)+"deg) rotateY("+(s()?-n:0)+"deg)")},setTransition:function(e){y.slides.transition(e).find(".swiper-slide-shadow-top, .swiper-slide-shadow-right, .swiper-slide-shadow-bottom, .swiper-slide-shadow-left").transition(e),y.params.cube.shadow&&!s()&&y.container.find(".swiper-cube-shadow").transition(e)}},coverflow:{setTranslate:function(){for(var e=y.translate,n=s()?-e+y.width/2:-e+y.height/2,i=s()?y.params.coverflow.rotate:-y.params.coverflow.rotate,r=y.params.coverflow.depth,a=0,o=y.slides.length;o>a;a++){var l=y.slides.eq(a),c=y.slidesSizesGrid[a],d=l[0].swiperSlideOffset,u=(n-d-c/2)/c*y.params.coverflow.modifier,p=s()?i*u:0,h=s()?0:i*u,f=-r*Math.abs(u),m=s()?0:y.params.coverflow.stretch*u,g=s()?y.params.coverflow.stretch*u:0;Math.abs(g)<.001&&(g=0),Math.abs(m)<.001&&(m=0),Math.abs(f)<.001&&(f=0),Math.abs(p)<.001&&(p=0),Math.abs(h)<.001&&(h=0);var v="translate3d("+g+"px,"+m+"px,"+f+"px) rotateX("+h+"deg) rotateY("+p+"deg)";if(l.transform(v),l[0].style.zIndex=-Math.abs(Math.round(u))+1,y.params.coverflow.slideShadows){var _=s()?l.find(".swiper-slide-shadow-left"):l.find(".swiper-slide-shadow-top"),w=s()?l.find(".swiper-slide-shadow-right"):l.find(".swiper-slide-shadow-bottom");0===_.length&&(_=t('<div class="swiper-slide-shadow-'+(s()?"left":"top")+'"></div>'),l.append(_)),0===w.length&&(w=t('<div class="swiper-slide-shadow-'+(s()?"right":"bottom")+'"></div>'),l.append(w)),_.length&&(_[0].style.opacity=u>0?u:0),w.length&&(w[0].style.opacity=-u>0?-u:0)}}if(y.browser.ie){var T=y.wrapper[0].style;T.perspectiveOrigin=n+"px 50%"}},setTransition:function(e){y.slides.transition(e).find(".swiper-slide-shadow-top, .swiper-slide-shadow-right, .swiper-slide-shadow-bottom, .swiper-slide-shadow-left").transition(e)}}},y.lazy={initialImageLoaded:!1,loadImageInSlide:function(e,n){if("undefined"!=typeof 
e&&("undefined"==typeof n&&(n=!0),0!==y.slides.length)){var i=y.slides.eq(e),r=i.find(".swiper-lazy:not(.swiper-lazy-loaded):not(.swiper-lazy-loading)");!i.hasClass("swiper-lazy")||i.hasClass("swiper-lazy-loaded")||i.hasClass("swiper-lazy-loading")||(r=r.add(i[0])),0!==r.length&&r.each(function(){var e=t(this);e.addClass("swiper-lazy-loading");var r=e.attr("data-background"),a=e.attr("data-src"),o=e.attr("data-srcset");y.loadImage(e[0],a||r,o,!1,function(){if(r?(e.css("background-image","url("+r+")"),e.removeAttr("data-background")):(o&&(e.attr("srcset",o),e.removeAttr("data-srcset")),a&&(e.attr("src",a),e.removeAttr("data-src"))),e.addClass("swiper-lazy-loaded").removeClass("swiper-lazy-loading"),i.find(".swiper-lazy-preloader, .preloader").remove(),y.params.loop&&n){var t=i.attr("data-swiper-slide-index");if(i.hasClass(y.params.slideDuplicateClass)){var s=y.wrapper.children('[data-swiper-slide-index="'+t+'"]:not(.'+y.params.slideDuplicateClass+")");y.lazy.loadImageInSlide(s.index(),!1)}else{var l=y.wrapper.children("."+y.params.slideDuplicateClass+'[data-swiper-slide-index="'+t+'"]');y.lazy.loadImageInSlide(l.index(),!1)}}y.emit("onLazyImageReady",y,i[0],e[0])}),y.emit("onLazyImageLoad",y,i[0],e[0])})}},load:function(){var e;if(y.params.watchSlidesVisibility)y.wrapper.children("."+y.params.slideVisibleClass).each(function(){y.lazy.loadImageInSlide(t(this).index())});else if(y.params.slidesPerView>1)for(e=y.activeIndex;e<y.activeIndex+y.params.slidesPerView;e++)y.slides[e]&&y.lazy.loadImageInSlide(e);else y.lazy.loadImageInSlide(y.activeIndex);if(y.params.lazyLoadingInPrevNext)if(y.params.slidesPerView>1){for(e=y.activeIndex+y.params.slidesPerView;e<y.activeIndex+y.params.slidesPerView+y.params.slidesPerView;e++)y.slides[e]&&y.lazy.loadImageInSlide(e);for(e=y.activeIndex-y.params.slidesPerView;e<y.activeIndex;e++)y.slides[e]&&y.lazy.loadImageInSlide(e)}else{var n=y.wrapper.children("."+y.params.slideNextClass);n.length>0&&y.lazy.loadImageInSlide(n.index());var i=y.wrapper.children("."+y.params.slidePrevClass);i.length>0&&y.lazy.loadImageInSlide(i.index())}},onTransitionStart:function(){y.params.lazyLoading&&(y.params.lazyLoadingOnTransitionStart||!y.params.lazyLoadingOnTransitionStart&&!y.lazy.initialImageLoaded)&&y.lazy.load()},onTransitionEnd:function(){y.params.lazyLoading&&!y.params.lazyLoadingOnTransitionStart&&y.lazy.load()}},y.scrollbar={isTouched:!1,setDragPosition:function(e){var t=y.scrollbar,n=s()?"touchstart"===e.type||"touchmove"===e.type?e.targetTouches[0].pageX:e.pageX||e.clientX:"touchstart"===e.type||"touchmove"===e.type?e.targetTouches[0].pageY:e.pageY||e.clientY,i=n-t.track.offset()[s()?"left":"top"]-t.dragSize/2,r=-y.minTranslate()*t.moveDivider,a=-y.maxTranslate()*t.moveDivider;r>i?i=r:i>a&&(i=a),i=-i/t.moveDivider,y.updateProgress(i),y.setWrapperTranslate(i,!0)},dragStart:function(e){var t=y.scrollbar;t.isTouched=!0,e.preventDefault(),e.stopPropagation(),t.setDragPosition(e),clearTimeout(t.dragTimeout),t.track.transition(0),y.params.scrollbarHide&&t.track.css("opacity",1),y.wrapper.transition(100),t.drag.transition(100),y.emit("onScrollbarDragStart",y)},dragMove:function(e){var t=y.scrollbar;t.isTouched&&(e.preventDefault?e.preventDefault():e.returnValue=!1,t.setDragPosition(e),y.wrapper.transition(0),t.track.transition(0),t.drag.transition(0),y.emit("onScrollbarDragMove",y))},dragEnd:function(e){var 
t=y.scrollbar;t.isTouched&&(t.isTouched=!1,y.params.scrollbarHide&&(clearTimeout(t.dragTimeout),t.dragTimeout=setTimeout(function(){t.track.css("opacity",0),t.track.transition(400)},1e3)),y.emit("onScrollbarDragEnd",y),y.params.scrollbarSnapOnRelease&&y.slideReset())},enableDraggable:function(){var e=y.scrollbar,n=y.support.touch?e.track:document;t(e.track).on(y.touchEvents.start,e.dragStart),t(n).on(y.touchEvents.move,e.dragMove),t(n).on(y.touchEvents.end,e.dragEnd)},disableDraggable:function(){var e=y.scrollbar,n=y.support.touch?e.track:document;t(e.track).off(y.touchEvents.start,e.dragStart),t(n).off(y.touchEvents.move,e.dragMove),t(n).off(y.touchEvents.end,e.dragEnd)},set:function(){if(y.params.scrollbar){var e=y.scrollbar;e.track=t(y.params.scrollbar),e.drag=e.track.find(".swiper-scrollbar-drag"),0===e.drag.length&&(e.drag=t('<div class="swiper-scrollbar-drag"></div>'),e.track.append(e.drag)),e.drag[0].style.width="",e.drag[0].style.height="",e.trackSize=s()?e.track[0].offsetWidth:e.track[0].offsetHeight,e.divider=y.size/y.virtualSize,e.moveDivider=e.divider*(e.trackSize/y.size),e.dragSize=e.trackSize*e.divider,s()?e.drag[0].style.width=e.dragSize+"px":e.drag[0].style.height=e.dragSize+"px",e.divider>=1?e.track[0].style.display="none":e.track[0].style.display="",y.params.scrollbarHide&&(e.track[0].style.opacity=0)}},setTranslate:function(){if(y.params.scrollbar){var e,t=y.scrollbar,n=(y.translate||0,t.dragSize);e=(t.trackSize-t.dragSize)*y.progress,y.rtl&&s()?(e=-e,e>0?(n=t.dragSize-e,e=0):-e+t.dragSize>t.trackSize&&(n=t.trackSize+e)):0>e?(n=t.dragSize+e,e=0):e+t.dragSize>t.trackSize&&(n=t.trackSize-e),s()?(y.support.transforms3d?t.drag.transform("translate3d("+e+"px, 0, 0)"):t.drag.transform("translateX("+e+"px)"),t.drag[0].style.width=n+"px"):(y.support.transforms3d?t.drag.transform("translate3d(0px, "+e+"px, 0)"):t.drag.transform("translateY("+e+"px)"),t.drag[0].style.height=n+"px"),y.params.scrollbarHide&&(clearTimeout(t.timeout),t.track[0].style.opacity=1,t.timeout=setTimeout(function(){t.track[0].style.opacity=0,t.track.transition(400)},1e3))}},setTransition:function(e){y.params.scrollbar&&y.scrollbar.drag.transition(e)}},y.controller={LinearSpline:function(e,t){this.x=e,this.y=t,this.lastIndex=e.length-1;var n,i;this.x.length;this.interpolate=function(e){return e?(i=r(this.x,e),n=i-1,(e-this.x[n])*(this.y[i]-this.y[n])/(this.x[i]-this.x[n])+this.y[n]):0};var r=function(){var e,t,n;return function(i,r){for(t=-1,e=i.length;e-t>1;)i[n=e+t>>1]<=r?t=n:e=n;return e}}()},getInterpolateFunction:function(e){y.controller.spline||(y.controller.spline=y.params.loop?new y.controller.LinearSpline(y.slidesGrid,e.slidesGrid):new y.controller.LinearSpline(y.snapGrid,e.snapGrid))},setTranslate:function(e,t){function i(t){e=t.rtl&&"horizontal"===t.params.direction?-y.translate:y.translate,"slide"===y.params.controlBy&&(y.controller.getInterpolateFunction(t),a=-y.controller.spline.interpolate(-e)),a&&"container"!==y.params.controlBy||(r=(t.maxTranslate()-t.minTranslate())/(y.maxTranslate()-y.minTranslate()),a=(e-y.minTranslate())*r+t.minTranslate()),y.params.controlInverse&&(a=t.maxTranslate()-a),t.updateProgress(a),t.setWrapperTranslate(a,!1,y),t.updateActiveIndex()}var r,a,o=y.params.control;if(y.isArray(o))for(var s=0;s<o.length;s++)o[s]!==t&&o[s]instanceof n&&i(o[s]);else o instanceof n&&t!==o&&i(o)},setTransition:function(e,t){function 
i(t){t.setWrapperTransition(e,y),0!==e&&(t.onTransitionStart(),t.wrapper.transitionEnd(function(){a&&(t.params.loop&&"slide"===y.params.controlBy&&t.fixLoop(),t.onTransitionEnd())}))}var r,a=y.params.control;if(y.isArray(a))for(r=0;r<a.length;r++)a[r]!==t&&a[r]instanceof n&&i(a[r]);else a instanceof n&&t!==a&&i(a)}},y.hashnav={init:function(){if(y.params.hashnav){y.hashnav.initialized=!0;var e=document.location.hash.replace("#","");if(e)for(var t=0,n=0,i=y.slides.length;i>n;n++){var r=y.slides.eq(n),a=r.attr("data-hash");if(a===e&&!r.hasClass(y.params.slideDuplicateClass)){var o=r.index();y.slideTo(o,t,y.params.runCallbacksOnInit,!0)}}}},setHash:function(){y.hashnav.initialized&&y.params.hashnav&&(document.location.hash=y.slides.eq(y.activeIndex).attr("data-hash")||"")}},y.disableKeyboardControl=function(){y.params.keyboardControl=!1,t(document).off("keydown",p)},y.enableKeyboardControl=function(){y.params.keyboardControl=!0,t(document).on("keydown",p)},y.mousewheel={event:!1,lastScrollTime:(new window.Date).getTime()},y.params.mousewheelControl){try{new window.WheelEvent("wheel"),y.mousewheel.event="wheel"}catch(H){}y.mousewheel.event||void 0===document.onmousewheel||(y.mousewheel.event="mousewheel"),y.mousewheel.event||(y.mousewheel.event="DOMMouseScroll")}y.disableMousewheelControl=function(){return y.mousewheel.event?(y.container.off(y.mousewheel.event,h),!0):!1},y.enableMousewheelControl=function(){return y.mousewheel.event?(y.container.on(y.mousewheel.event,h),!0):!1},y.parallax={setTranslate:function(){y.container.children("[data-swiper-parallax], [data-swiper-parallax-x], [data-swiper-parallax-y]").each(function(){f(this,y.progress)}),y.slides.each(function(){var e=t(this);e.find("[data-swiper-parallax], [data-swiper-parallax-x], [data-swiper-parallax-y]").each(function(){var t=Math.min(Math.max(e[0].progress,-1),1);f(this,t)})})},setTransition:function(e){"undefined"==typeof e&&(e=y.params.speed),y.container.find("[data-swiper-parallax], [data-swiper-parallax-x], [data-swiper-parallax-y]").each(function(){var n=t(this),i=parseInt(n.attr("data-swiper-parallax-duration"),10)||e;0===e&&(i=0),n.transition(i)})}},y._plugins=[];for(var Y in y.plugins){var X=y.plugins[Y](y,y.params[Y]);X&&y._plugins.push(X)}return y.callPlugins=function(e){for(var t=0;t<y._plugins.length;t++)e in y._plugins[t]&&y._plugins[t][e](arguments[1],arguments[2],arguments[3],arguments[4],arguments[5])},y.emitterEventListeners={},y.emit=function(e){y.params[e]&&y.params[e](arguments[1],arguments[2],arguments[3],arguments[4],arguments[5]);var t;if(y.emitterEventListeners[e])for(t=0;t<y.emitterEventListeners[e].length;t++)y.emitterEventListeners[e][t](arguments[1],arguments[2],arguments[3],arguments[4],arguments[5]);y.callPlugins&&y.callPlugins(e,arguments[1],arguments[2],arguments[3],arguments[4],arguments[5])},y.on=function(e,t){return e=m(e),y.emitterEventListeners[e]||(y.emitterEventListeners[e]=[]),y.emitterEventListeners[e].push(t),y},y.off=function(e,t){var n;if(e=m(e),"undefined"==typeof t)return y.emitterEventListeners[e]=[],y;if(y.emitterEventListeners[e]&&0!==y.emitterEventListeners[e].length){for(n=0;n<y.emitterEventListeners[e].length;n++)y.emitterEventListeners[e][n]===t&&y.emitterEventListeners[e].splice(n,1);return y}},y.once=function(e,t){e=m(e);var n=function(){t(arguments[0],arguments[1],arguments[2],arguments[3],arguments[4]),y.off(e,n)};return y.on(e,n),y},y.a11y={makeFocusable:function(e){return e.attr("tabIndex","0"),e},addRole:function(e,t){return 
e.attr("role",t),e},addLabel:function(e,t){return e.attr("aria-label",t),e},disable:function(e){return e.attr("aria-disabled",!0),e},enable:function(e){return e.attr("aria-disabled",!1),e},onEnterKey:function(e){13===e.keyCode&&(t(e.target).is(y.params.nextButton)?(y.onClickNext(e),y.isEnd?y.a11y.notify(y.params.lastSlideMessage):y.a11y.notify(y.params.nextSlideMessage)):t(e.target).is(y.params.prevButton)&&(y.onClickPrev(e),y.isBeginning?y.a11y.notify(y.params.firstSlideMessage):y.a11y.notify(y.params.prevSlideMessage)),t(e.target).is("."+y.params.bulletClass)&&t(e.target)[0].click())},liveRegion:t('<span class="swiper-notification" aria-live="assertive" aria-atomic="true"></span>'),notify:function(e){var t=y.a11y.liveRegion;0!==t.length&&(t.html(""),t.html(e))},init:function(){if(y.params.nextButton){var e=t(y.params.nextButton);y.a11y.makeFocusable(e),y.a11y.addRole(e,"button"),y.a11y.addLabel(e,y.params.nextSlideMessage)}if(y.params.prevButton){var n=t(y.params.prevButton);y.a11y.makeFocusable(n),y.a11y.addRole(n,"button"),y.a11y.addLabel(n,y.params.prevSlideMessage)}t(y.container).append(y.a11y.liveRegion)},initPagination:function(){y.params.pagination&&y.params.paginationClickable&&y.bullets&&y.bullets.length&&y.bullets.each(function(){var e=t(this);y.a11y.makeFocusable(e),y.a11y.addRole(e,"button"),y.a11y.addLabel(e,y.params.paginationBulletMessage.replace(/{{index}}/,e.index()+1))})},destroy:function(){y.a11y.liveRegion&&y.a11y.liveRegion.length>0&&y.a11y.liveRegion.remove()}},y.init=function(){y.params.loop&&y.createLoop(),y.updateContainerSize(),y.updateSlidesSize(),y.updatePagination(),y.params.scrollbar&&y.scrollbar&&(y.scrollbar.set(),y.params.scrollbarDraggable&&y.scrollbar.enableDraggable()),"slide"!==y.params.effect&&y.effects[y.params.effect]&&(y.params.loop||y.updateProgress(),y.effects[y.params.effect].setTranslate()),y.params.loop?y.slideTo(y.params.initialSlide+y.loopedSlides,0,y.params.runCallbacksOnInit):(y.slideTo(y.params.initialSlide,0,y.params.runCallbacksOnInit),0===y.params.initialSlide&&(y.parallax&&y.params.parallax&&y.parallax.setTranslate(),y.lazy&&y.params.lazyLoading&&(y.lazy.load(),y.lazy.initialImageLoaded=!0))),y.attachEvents(),y.params.observer&&y.support.observer&&y.initObservers(),y.params.preloadImages&&!y.params.lazyLoading&&y.preloadImages(),y.params.autoplay&&y.startAutoplay(),y.params.keyboardControl&&y.enableKeyboardControl&&y.enableKeyboardControl(),y.params.mousewheelControl&&y.enableMousewheelControl&&y.enableMousewheelControl(),y.params.hashnav&&y.hashnav&&y.hashnav.init(),y.params.a11y&&y.a11y&&y.a11y.init(),y.emit("onInit",y)},y.cleanupStyles=function(){y.container.removeClass(y.classNames.join(" ")).removeAttr("style"),y.wrapper.removeAttr("style"),y.slides&&y.slides.length&&y.slides.removeClass([y.params.slideVisibleClass,y.params.slideActiveClass,y.params.slideNextClass,y.params.slidePrevClass].join(" 
")).removeAttr("style").removeAttr("data-swiper-column").removeAttr("data-swiper-row"),y.paginationContainer&&y.paginationContainer.length&&y.paginationContainer.removeClass(y.params.paginationHiddenClass),y.bullets&&y.bullets.length&&y.bullets.removeClass(y.params.bulletActiveClass),y.params.prevButton&&t(y.params.prevButton).removeClass(y.params.buttonDisabledClass),y.params.nextButton&&t(y.params.nextButton).removeClass(y.params.buttonDisabledClass),y.params.scrollbar&&y.scrollbar&&(y.scrollbar.track&&y.scrollbar.track.length&&y.scrollbar.track.removeAttr("style"),y.scrollbar.drag&&y.scrollbar.drag.length&&y.scrollbar.drag.removeAttr("style"))},y.destroy=function(e,t){y.detachEvents(),y.stopAutoplay(),y.params.scrollbar&&y.scrollbar&&y.params.scrollbarDraggable&&y.scrollbar.disableDraggable(),y.params.loop&&y.destroyLoop(),t&&y.cleanupStyles(),y.disconnectObservers(),y.params.keyboardControl&&y.disableKeyboardControl&&y.disableKeyboardControl(),y.params.mousewheelControl&&y.disableMousewheelControl&&y.disableMousewheelControl(),y.params.a11y&&y.a11y&&y.a11y.destroy(),y.emit("onDestroy"),e!==!1&&(y=null)},y.init(),y}};n.prototype={isSafari:function(){var e=navigator.userAgent.toLowerCase();return e.indexOf("safari")>=0&&e.indexOf("chrome")<0&&e.indexOf("android")<0}(),isUiWebView:/(iPhone|iPod|iPad).*AppleWebKit(?!.*Safari)/i.test(navigator.userAgent),isArray:function(e){return"[object Array]"===Object.prototype.toString.apply(e)},browser:{ie:window.navigator.pointerEnabled||window.navigator.msPointerEnabled,ieTouch:window.navigator.msPointerEnabled&&window.navigator.msMaxTouchPoints>1||window.navigator.pointerEnabled&&window.navigator.maxTouchPoints>1},device:function(){var e=navigator.userAgent,t=e.match(/(Android);?[\s\/]+([\d.]+)?/),n=e.match(/(iPad).*OS\s([\d_]+)/),i=e.match(/(iPod)(.*OS\s([\d_]+))?/),r=!n&&e.match(/(iPhone\sOS)\s([\d_]+)/);return{ios:n||r||i,android:t}}(),support:{touch:window.Modernizr&&Modernizr.touch===!0||function(){return!!("ontouchstart"in window||window.DocumentTouch&&document instanceof DocumentTouch)}(),transforms3d:window.Modernizr&&Modernizr.csstransforms3d===!0||function(){var e=document.createElement("div").style;return"webkitPerspective"in e||"MozPerspective"in e||"OPerspective"in e||"MsPerspective"in e||"perspective"in e}(),flexbox:function(){for(var e=document.createElement("div").style,t="alignItems webkitAlignItems webkitBoxAlign msFlexAlign mozBoxAlign webkitFlexDirection msFlexDirection mozBoxDirection mozBoxOrient webkitBoxDirection webkitBoxOrient".split(" "),n=0;n<t.length;n++)if(t[n]in e)return!0}(),observer:function(){return"MutationObserver"in window||"WebkitMutationObserver"in window}()},plugins:{}};for(var i=(function(){var e=function(e){var t=this,n=0;for(n=0;n<e.length;n++)t[n]=e[n];return t.length=e.length,this},t=function(t,n){var i=[],r=0;if(t&&!n&&t instanceof e)return t;if(t)if("string"==typeof t){var a,o,s=t.trim();if(s.indexOf("<")>=0&&s.indexOf(">")>=0){var l="div";for(0===s.indexOf("<li")&&(l="ul"),0===s.indexOf("<tr")&&(l="tbody"),(0===s.indexOf("<td")||0===s.indexOf("<th"))&&(l="tr"),0===s.indexOf("<tbody")&&(l="table"),0===s.indexOf("<option")&&(l="select"),o=document.createElement(l),o.innerHTML=t,r=0;r<o.childNodes.length;r++)i.push(o.childNodes[r])}else for(a=n||"#"!==t[0]||t.match(/[ .<>:~]/)?(n||document).querySelectorAll(t):[document.getElementById(t.split("#")[1])],r=0;r<a.length;r++)a[r]&&i.push(a[r])}else if(t.nodeType||t===window||t===document)i.push(t);else 
if(t.length>0&&t[0].nodeType)for(r=0;r<t.length;r++)i.push(t[r]);return new e(i)};return e.prototype={addClass:function(e){if("undefined"==typeof e)return this;for(var t=e.split(" "),n=0;n<t.length;n++)for(var i=0;i<this.length;i++)this[i].classList.add(t[n]);return this},removeClass:function(e){for(var t=e.split(" "),n=0;n<t.length;n++)for(var i=0;i<this.length;i++)this[i].classList.remove(t[n]);return this},hasClass:function(e){return this[0]?this[0].classList.contains(e):!1},toggleClass:function(e){for(var t=e.split(" "),n=0;n<t.length;n++)for(var i=0;i<this.length;i++)this[i].classList.toggle(t[n]);return this},attr:function(e,t){if(1===arguments.length&&"string"==typeof e)return this[0]?this[0].getAttribute(e):void 0;for(var n=0;n<this.length;n++)if(2===arguments.length)this[n].setAttribute(e,t);else for(var i in e)this[n][i]=e[i],this[n].setAttribute(i,e[i]);return this},removeAttr:function(e){for(var t=0;t<this.length;t++)this[t].removeAttribute(e);return this},data:function(e,t){if("undefined"==typeof t){if(this[0]){var n=this[0].getAttribute("data-"+e);return n?n:this[0].dom7ElementDataStorage&&e in this[0].dom7ElementDataStorage?this[0].dom7ElementDataStorage[e]:void 0}return void 0}for(var i=0;i<this.length;i++){var r=this[i];r.dom7ElementDataStorage||(r.dom7ElementDataStorage={}),r.dom7ElementDataStorage[e]=t}return this},transform:function(e){for(var t=0;t<this.length;t++){var n=this[t].style;n.webkitTransform=n.MsTransform=n.msTransform=n.MozTransform=n.OTransform=n.transform=e}return this},transition:function(e){"string"!=typeof e&&(e+="ms");for(var t=0;t<this.length;t++){var n=this[t].style;n.webkitTransitionDuration=n.MsTransitionDuration=n.msTransitionDuration=n.MozTransitionDuration=n.OTransitionDuration=n.transitionDuration=e}return this},on:function(e,n,i,r){function a(e){var r=e.target;if(t(r).is(n))i.call(r,e);else for(var a=t(r).parents(),o=0;o<a.length;o++)t(a[o]).is(n)&&i.call(a[o],e)}var o,s,l=e.split(" ");for(o=0;o<this.length;o++)if("function"==typeof n||n===!1)for("function"==typeof n&&(i=arguments[1],r=arguments[2]||!1),s=0;s<l.length;s++)this[o].addEventListener(l[s],i,r);else for(s=0;s<l.length;s++)this[o].dom7LiveListeners||(this[o].dom7LiveListeners=[]),this[o].dom7LiveListeners.push({listener:i,liveListener:a}),this[o].addEventListener(l[s],a,r);return this},off:function(e,t,n,i){for(var r=e.split(" "),a=0;a<r.length;a++)for(var o=0;o<this.length;o++)if("function"==typeof t||t===!1)"function"==typeof t&&(n=arguments[1],i=arguments[2]||!1),this[o].removeEventListener(r[a],n,i);else if(this[o].dom7LiveListeners)for(var s=0;s<this[o].dom7LiveListeners.length;s++)this[o].dom7LiveListeners[s].listener===n&&this[o].removeEventListener(r[a],this[o].dom7LiveListeners[s].liveListener,i);return this},once:function(e,t,n,i){function r(o){n(o),a.off(e,t,r,i)}var a=this;"function"==typeof t&&(t=!1,n=arguments[1],i=arguments[2]),a.on(e,t,r,i)},trigger:function(e,t){for(var n=0;n<this.length;n++){var i;try{i=new window.CustomEvent(e,{detail:t,bubbles:!0,cancelable:!0})}catch(r){i=document.createEvent("Event"),i.initEvent(e,!0,!0),i.detail=t}this[n].dispatchEvent(i)}return this},transitionEnd:function(e){function t(a){if(a.target===this)for(e.call(this,a),n=0;n<i.length;n++)r.off(i[n],t)}var n,i=["webkitTransitionEnd","transitionend","oTransitionEnd","MSTransitionEnd","msTransitionEnd"],r=this;if(e)for(n=0;n<i.length;n++)r.on(i[n],t);return this},width:function(){return 
this[0]===window?window.innerWidth:this.length>0?parseFloat(this.css("width")):null},outerWidth:function(e){return this.length>0?e?this[0].offsetWidth+parseFloat(this.css("margin-right"))+parseFloat(this.css("margin-left")):this[0].offsetWidth:null},height:function(){return this[0]===window?window.innerHeight:this.length>0?parseFloat(this.css("height")):null},outerHeight:function(e){return this.length>0?e?this[0].offsetHeight+parseFloat(this.css("margin-top"))+parseFloat(this.css("margin-bottom")):this[0].offsetHeight:null},offset:function(){if(this.length>0){var e=this[0],t=e.getBoundingClientRect(),n=document.body,i=e.clientTop||n.clientTop||0,r=e.clientLeft||n.clientLeft||0,a=window.pageYOffset||e.scrollTop,o=window.pageXOffset||e.scrollLeft;return{top:t.top+a-i,left:t.left+o-r}}return null},css:function(e,t){var n;if(1===arguments.length){if("string"!=typeof e){for(n=0;n<this.length;n++)for(var i in e)this[n].style[i]=e[i];return this}if(this[0])return window.getComputedStyle(this[0],null).getPropertyValue(e)}if(2===arguments.length&&"string"==typeof e){for(n=0;n<this.length;n++)this[n].style[e]=t;return this}return this},each:function(e){for(var t=0;t<this.length;t++)e.call(this[t],t,this[t]);return this},html:function(e){if("undefined"==typeof e)return this[0]?this[0].innerHTML:void 0;for(var t=0;t<this.length;t++)this[t].innerHTML=e;return this},is:function(n){if(!this[0])return!1;var i,r;if("string"==typeof n){var a=this[0];if(a===document)return n===document;if(a===window)return n===window;if(a.matches)return a.matches(n);if(a.webkitMatchesSelector)return a.webkitMatchesSelector(n);if(a.mozMatchesSelector)return a.mozMatchesSelector(n);if(a.msMatchesSelector)return a.msMatchesSelector(n);for(i=t(n),r=0;r<i.length;r++)if(i[r]===this[0])return!0;return!1}if(n===document)return this[0]===document;if(n===window)return this[0]===window;if(n.nodeType||n instanceof e){for(i=n.nodeType?[n]:n,r=0;r<i.length;r++)if(i[r]===this[0])return!0;return!1}return!1},index:function(){if(this[0]){for(var e=this[0],t=0;null!==(e=e.previousSibling);)1===e.nodeType&&t++;return t}return void 0},eq:function(t){if("undefined"==typeof t)return this; var n,i=this.length;return t>i-1?new e([]):0>t?(n=i+t,new e(0>n?[]:[this[n]])):new e([this[t]])},append:function(t){var n,i;for(n=0;n<this.length;n++)if("string"==typeof t){var r=document.createElement("div");for(r.innerHTML=t;r.firstChild;)this[n].appendChild(r.firstChild)}else if(t instanceof e)for(i=0;i<t.length;i++)this[n].appendChild(t[i]);else this[n].appendChild(t);return this},prepend:function(t){var n,i;for(n=0;n<this.length;n++)if("string"==typeof t){var r=document.createElement("div");for(r.innerHTML=t,i=r.childNodes.length-1;i>=0;i--)this[n].insertBefore(r.childNodes[i],this[n].childNodes[0])}else if(t instanceof e)for(i=0;i<t.length;i++)this[n].insertBefore(t[i],this[n].childNodes[0]);else this[n].insertBefore(t,this[n].childNodes[0]);return this},insertBefore:function(e){for(var n=t(e),i=0;i<this.length;i++)if(1===n.length)n[0].parentNode.insertBefore(this[i],n[0]);else if(n.length>1)for(var r=0;r<n.length;r++)n[r].parentNode.insertBefore(this[i].cloneNode(!0),n[r])},insertAfter:function(e){for(var n=t(e),i=0;i<this.length;i++)if(1===n.length)n[0].parentNode.insertBefore(this[i],n[0].nextSibling);else if(n.length>1)for(var r=0;r<n.length;r++)n[r].parentNode.insertBefore(this[i].cloneNode(!0),n[r].nextSibling)},next:function(n){return new 
e(this.length>0?n?this[0].nextElementSibling&&t(this[0].nextElementSibling).is(n)?[this[0].nextElementSibling]:[]:this[0].nextElementSibling?[this[0].nextElementSibling]:[]:[])},nextAll:function(n){var i=[],r=this[0];if(!r)return new e([]);for(;r.nextElementSibling;){var a=r.nextElementSibling;n?t(a).is(n)&&i.push(a):i.push(a),r=a}return new e(i)},prev:function(n){return new e(this.length>0?n?this[0].previousElementSibling&&t(this[0].previousElementSibling).is(n)?[this[0].previousElementSibling]:[]:this[0].previousElementSibling?[this[0].previousElementSibling]:[]:[])},prevAll:function(n){var i=[],r=this[0];if(!r)return new e([]);for(;r.previousElementSibling;){var a=r.previousElementSibling;n?t(a).is(n)&&i.push(a):i.push(a),r=a}return new e(i)},parent:function(e){for(var n=[],i=0;i<this.length;i++)e?t(this[i].parentNode).is(e)&&n.push(this[i].parentNode):n.push(this[i].parentNode);return t(t.unique(n))},parents:function(e){for(var n=[],i=0;i<this.length;i++)for(var r=this[i].parentNode;r;)e?t(r).is(e)&&n.push(r):n.push(r),r=r.parentNode;return t(t.unique(n))},find:function(t){for(var n=[],i=0;i<this.length;i++)for(var r=this[i].querySelectorAll(t),a=0;a<r.length;a++)n.push(r[a]);return new e(n)},children:function(n){for(var i=[],r=0;r<this.length;r++)for(var a=this[r].childNodes,o=0;o<a.length;o++)n?1===a[o].nodeType&&t(a[o]).is(n)&&i.push(a[o]):1===a[o].nodeType&&i.push(a[o]);return new e(t.unique(i))},remove:function(){for(var e=0;e<this.length;e++)this[e].parentNode&&this[e].parentNode.removeChild(this[e]);return this},add:function(){var e,n,i=this;for(e=0;e<arguments.length;e++){var r=t(arguments[e]);for(n=0;n<r.length;n++)i[i.length]=r[n],i.length++}return i}},t.fn=e.prototype,t.unique=function(e){for(var t=[],n=0;n<e.length;n++)-1===t.indexOf(e[n])&&t.push(e[n]);return t},t}()),r=["jQuery","Zepto","Dom7"],a=0;a<r.length;a++)window[r[a]]&&e(window[r[a]]);var o;o="undefined"==typeof i?window.Dom7||window.Zepto||window.jQuery:i,o&&("transitionEnd"in o.fn||(o.fn.transitionEnd=function(e){function t(a){if(a.target===this)for(e.call(this,a),n=0;n<i.length;n++)r.off(i[n],t)}var n,i=["webkitTransitionEnd","transitionend","oTransitionEnd","MSTransitionEnd","msTransitionEnd"],r=this;if(e)for(n=0;n<i.length;n++)r.on(i[n],t);return this}),"transform"in o.fn||(o.fn.transform=function(e){for(var t=0;t<this.length;t++){var n=this[t].style;n.webkitTransform=n.MsTransform=n.msTransform=n.MozTransform=n.OTransform=n.transform=e}return this}),"transition"in o.fn||(o.fn.transition=function(e){"string"!=typeof e&&(e+="ms");for(var t=0;t<this.length;t++){var n=this[t].style;n.webkitTransitionDuration=n.MsTransitionDuration=n.msTransitionDuration=n.MozTransitionDuration=n.OTransitionDuration=n.transitionDuration=e}return this})),ionic.views.Swiper=n}(),function(e){"use strict";e.views.Toggle=e.views.View.inherit({initialize:function(t){var 
n=this;this.el=t.el,this.checkbox=t.checkbox,this.track=t.track,this.handle=t.handle,this.openPercent=-1,this.onChange=t.onChange||function(){},this.triggerThreshold=t.triggerThreshold||20,this.dragStartHandler=function(e){n.dragStart(e)},this.dragHandler=function(e){n.drag(e)},this.holdHandler=function(e){n.hold(e)},this.releaseHandler=function(e){n.release(e)},this.dragStartGesture=e.onGesture("dragstart",this.dragStartHandler,this.el),this.dragGesture=e.onGesture("drag",this.dragHandler,this.el),this.dragHoldGesture=e.onGesture("hold",this.holdHandler,this.el),this.dragReleaseGesture=e.onGesture("release",this.releaseHandler,this.el)},destroy:function(){e.offGesture(this.dragStartGesture,"dragstart",this.dragStartGesture),e.offGesture(this.dragGesture,"drag",this.dragGesture),e.offGesture(this.dragHoldGesture,"hold",this.holdHandler),e.offGesture(this.dragReleaseGesture,"release",this.releaseHandler)},tap:function(){"disabled"!==this.el.getAttribute("disabled")&&this.val(!this.checkbox.checked)},dragStart:function(e){this.checkbox.disabled||(this._dragInfo={width:this.el.offsetWidth,left:this.el.offsetLeft,right:this.el.offsetLeft+this.el.offsetWidth,triggerX:this.el.offsetWidth/2,initialState:this.checkbox.checked},e.gesture.srcEvent.preventDefault(),this.hold(e))},drag:function(t){var n=this;this._dragInfo&&(t.gesture.srcEvent.preventDefault(),e.requestAnimationFrame(function(){if(n._dragInfo){var e=t.gesture.touches[0].pageX-n._dragInfo.left,i=n._dragInfo.width-n.triggerThreshold;n._dragInfo.initialState?e<n.triggerThreshold?n.setOpenPercent(0):e>n._dragInfo.triggerX&&n.setOpenPercent(100):e<n._dragInfo.triggerX?n.setOpenPercent(0):e>i&&n.setOpenPercent(100)}}))},endDrag:function(){this._dragInfo=null},hold:function(){this.el.classList.add("dragging")},release:function(e){this.el.classList.remove("dragging"),this.endDrag(e)},setOpenPercent:function(t){if(this.openPercent<0||t<this.openPercent-3||t>this.openPercent+3)if(this.openPercent=t,0===t)this.val(!1);else if(100===t)this.val(!0);else{var n=Math.round(t/100*this.track.offsetWidth-this.handle.offsetWidth);n=1>n?0:n,this.handle.style[e.CSS.TRANSFORM]="translate3d("+n+"px,0,0)"}},val:function(t){return(t===!0||t===!1)&&(""!==this.handle.style[e.CSS.TRANSFORM]&&(this.handle.style[e.CSS.TRANSFORM]=""),this.checkbox.checked=t,this.openPercent=t?100:0,this.onChange&&this.onChange()),this.checkbox.checked}})}(ionic)}();
* Copyright 2015 Drifty Co.
server.go
package factorio import ( "bufio" "encoding/json" "errors" "io" "io/ioutil" "log" "os" "os/exec" "path/filepath" "regexp" "strconv" "strings" "sync" "github.com/OpenFactorioServerManager/factorio-server-manager/api/websocket" "github.com/OpenFactorioServerManager/factorio-server-manager/bootstrap" "github.com/OpenFactorioServerManager/rcon" ) type Server struct { Cmd *exec.Cmd `json:"-"` Savefile string `json:"savefile"` Latency int `json:"latency"` BindIP string `json:"bindip"` Port int `json:"port"` Running bool `json:"running"` Version Version `json:"fac_version"` BaseModVersion string `json:"base_mod_version"` StdOut io.ReadCloser `json:"-"` StdErr io.ReadCloser `json:"-"` StdIn io.WriteCloser `json:"-"` Settings map[string]interface{} `json:"-"` Rcon *rcon.RemoteConsole `json:"-"` LogChan chan []string `json:"-"` } var instantiated Server var once sync.Once func (server *Server) SetRunning(newState bool) { if server.Running != newState { log.Println("new state, will also send to correct room") server.Running = newState wsRoom := websocket.WebsocketHub.GetRoom("server_status") response, _ := json.Marshal(server) wsRoom.Send(string(response)) } } func (server *Server) GetRunning() bool { return server.Running } func (server *Server) autostart() { var err error if server.BindIP == "" { server.BindIP = "0.0.0.0" } if server.Port == 0 { server.Port = 34197 } server.Savefile = "Load Latest" err = server.Run() if err != nil { log.Printf("Error starting Factorio server: %+v", err) return } } func SetFactorioServer(server Server) { instantiated = server } func NewFactorioServer() (err error) { server := Server{} server.Settings = make(map[string]interface{}) config := bootstrap.GetConfig() if err = os.MkdirAll(config.FactorioConfigDir, 0755); err != nil { log.Printf("failed to create config directory: %v", err) return } settingsPath := config.SettingsFile var settings *os.File if _, err = os.Stat(settingsPath); os.IsNotExist(err) { // copy example settings to supplied settings file, if not exists log.Printf("Server settings at %s not found, copying example server settings.\n", settingsPath) examplePath := filepath.Join(config.FactorioDir, "data", "server-settings.example.json") var example *os.File example, err = os.Open(examplePath) if err != nil { log.Printf("failed to open example server settings: %v", err) return } defer example.Close() settings, err = os.Create(settingsPath) if err != nil { log.Printf("failed to create server settings file: %v", err) return } defer settings.Close() _, err = io.Copy(settings, example) if err != nil { log.Printf("failed to copy example server settings: %v", err) return } err = example.Close() if err != nil { log.Printf("failed to close example server settings: %s", err) return } } else { // otherwise, open file normally settings, err = os.Open(settingsPath) if err != nil { log.Printf("failed to open server settings file: %v", err) return } defer settings.Close() } // before reading reset offset if _, err = settings.Seek(0, 0); err != nil { log.Printf("error while seeking in settings file: %v", err) return } if err = json.NewDecoder(settings).Decode(&server.Settings); err != nil { log.Printf("error reading %s: %v", settingsPath, err) return } log.Printf("Loaded Factorio settings from %s\n", settingsPath) out := []byte{} //Load factorio version if config.GlibcCustom == "true" { out, err = exec.Command(config.GlibcLocation, "--library-path", config.GlibcLibLoc, config.FactorioBinary, "--version").Output() } else { out, err = exec.Command(config.FactorioBinary, 
"--version").Output() } if err != nil { log.Printf("error on loading factorio version: %s", err) return } reg := regexp.MustCompile("Version.*?((\\d+\\.)?(\\d+\\.)?(\\*|\\d+)+)") found := reg.FindStringSubmatch(string(out)) err = server.Version.UnmarshalText([]byte(found[1])) if err != nil { log.Printf("could not parse version: %v", err) return } //Load baseMod version baseModInfoFile := filepath.Join(config.FactorioBaseModDir, "info.json") bmifBa, err := ioutil.ReadFile(baseModInfoFile) if err != nil { log.Printf("couldn't open baseMods info.json: %s", err) return } var modInfo ModInfo err = json.Unmarshal(bmifBa, &modInfo) if err != nil { log.Printf("error unmarshalling baseMods info.json to a modInfo: %s", err) return } server.BaseModVersion = modInfo.Version // load admins from additional file if (server.Version.Greater(Version{0, 17, 0})) { if _, err = os.Stat(config.FactorioAdminFile); os.IsNotExist(err) { //save empty admins-file err = ioutil.WriteFile(config.FactorioAdminFile, []byte("[]"), 0664) server.Settings["admins"] = make([]string, 0) } else { var data []byte data, err = ioutil.ReadFile(config.FactorioAdminFile) if err != nil { log.Printf("Error loading FactorioAdminFile: %s", err) return } var jsonData interface{} err = json.Unmarshal(data, &jsonData) if err != nil { log.Printf("Error unmarshalling FactorioAdminFile: %s", err) return } server.Settings["admins"] = jsonData } } SetFactorioServer(server) // autostart factorio is configured to do so if config.Autostart == "true" { go instantiated.autostart() } return } func GetFactorioServer() (f *Server) { return &instantiated } func (server *Server) Run() error { var err error config := bootstrap.GetConfig() data, err := json.MarshalIndent(server.Settings, "", " ") if err != nil { log.Println("Failed to marshal FactorioServerSettings: ", err) } else { ioutil.WriteFile(config.SettingsFile, data, 0644) } saves, err := ListSaves(config.FactorioSavesDir) if err != nil { log.Println("Failed to get saves list: ", err) } if len(saves) == 0 { return errors.New("No savefile exists on the server") } args := []string{} //The factorio server refenences its executable-path, since we execute the ld.so file and pass the factorio binary as a parameter //the game would use the path to the ld.so file as it's executable path and crash, to prevent this the parameter "--executable-path" is added if config.GlibcCustom == "true" { log.Println("Custom glibc selected, glibc.so location:", config.GlibcLocation, " lib location:", config.GlibcLibLoc) args = append(args, "--library-path", config.GlibcLibLoc, config.FactorioBinary, "--executable-path", config.FactorioBinary) } args = append(args, "--bind", server.BindIP, "--port", strconv.Itoa(server.Port), "--server-settings", config.SettingsFile, "--rcon-port", strconv.Itoa(config.FactorioRconPort), "--rcon-password", config.FactorioRconPass) if (server.Version.Greater(Version{0, 17, 0})) { args = append(args, "--server-adminlist", config.FactorioAdminFile) } if server.Savefile == "Load Latest" { args = append(args, "--start-server-load-latest") } else { args = append(args, "--start-server", filepath.Join(config.FactorioSavesDir, server.Savefile)) } if config.GlibcCustom == "true" { log.Println("Starting server with command: ", config.GlibcLocation, args) server.Cmd = exec.Command(config.GlibcLocation, args...) } else { log.Println("Starting server with command: ", config.FactorioBinary, args) server.Cmd = exec.Command(config.FactorioBinary, args...) 
} // Write the chat log to a different file if requested (otherwise it is mixed in with the default logfile). // Note: server.Cmd has already been built above, so the flag must be appended to Cmd.Args directly; appending to the local args slice here would have no effect. if config.ChatLogFile != "" { server.Cmd.Args = append(server.Cmd.Args, "--console-log", config.ChatLogFile) } server.StdOut, err = server.Cmd.StdoutPipe() if err != nil { log.Printf("Error opening stdout pipe: %s", err) return err } server.StdIn, err = server.Cmd.StdinPipe() if err != nil { log.Printf("Error opening stdin pipe: %s", err) return err } server.StdErr, err = server.Cmd.StderrPipe() if err != nil { log.Printf("Error opening stderr pipe: %s", err) return err } go server.parseRunningCommand(server.StdOut) go server.parseRunningCommand(server.StdErr) err = server.Cmd.Start() if err != nil { log.Printf("Factorio process failed to start: %s", err) return err } server.SetRunning(true) err = server.Cmd.Wait() log.Printf("Factorio process has exited") server.SetRunning(false) if err != nil { log.Printf("Factorio process exited with error: %s", err) return err } return nil } func (server *Server) parseRunningCommand(std io.ReadCloser) (err error) { stdScanner := bufio.NewScanner(std) for stdScanner.Scan() { text := stdScanner.Text() log.Printf("Factorio Server: %s", text) if err := server.writeLog(text); err != nil { log.Printf("Error: %s", err) } // send the reported line over the websocket wsRoom := websocket.WebsocketHub.GetRoom("gamelog") go wsRoom.Send(text) line := strings.Fields(text) // Ensure the logline slice is in bounds if len(line) > 1 { // Check if the Factorio server reports an error and, if so, handle it if line[1] == "Error" { err := server.checkLogError(line) if err != nil { log.Printf("Error checking Factorio Server Error: %s", err) } } // If the log indicates that the RCON interface has opened, connect to RCON rconLog := "Starting RCON interface at IP" // make sure the line has enough fields before inspecting it, to prevent an index panic if len(line) > 2 { // log line for an opened rcon connection if strings.Contains(text, rconLog) { log.Printf("Rcon running on Factorio Server") err = connectRC() if err != nil
} server.checkProcessHealth(text) } } } if err := stdScanner.Err(); err != nil { log.Printf("Error reading std buffer: %s", err) return err } return nil } func (server *Server) writeLog(logline string) error { config := bootstrap.GetConfig() logfileName := config.ConsoleLogFile file, err := os.OpenFile(logfileName, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0644) if err != nil { log.Printf("Cannot open logfile %s for appending Factorio Server output: %s", logfileName, err) return err } defer file.Close() logline = logline + "\n" if _, err = file.WriteString(logline); err != nil { log.Printf("Error appending to %s: %s", logfileName, err) return err } return nil } func (server *Server) checkLogError(logline []string) error { // TODO Handle errors generated by the running Factorio Server log.Println(logline) return nil } func init() { websocket.WebsocketHub.RegisterControlHandler <- serverWebsocketControl } // react to websocket control messages and run the command if one is requested func serverWebsocketControl(controls websocket.WsControls) { log.Println(controls) if controls.Type == "command" { command := controls.Value server := GetFactorioServer() if server.GetRunning() { log.Printf("Received command: %v", command) reqId, err := server.Rcon.Write(command) if err != nil { log.Printf("Error sending rcon command: %s", err) return } log.Printf("Command sent to Factorio: %s, with rcon request id: %v", command, reqId) } } }
{ log.Printf("Error: %s", err) }
create_spring.go
package create import ( "fmt" "github.com/jenkins-x/jx/pkg/cmd/importcmd" "os" "github.com/jenkins-x/jx/pkg/cmd/helper" "github.com/jenkins-x/jx/pkg/gits" "github.com/spf13/cobra" "github.com/jenkins-x/jx/pkg/cmd/opts" "github.com/jenkins-x/jx/pkg/cmd/templates" "github.com/jenkins-x/jx/pkg/log" "github.com/jenkins-x/jx/pkg/spring" "github.com/jenkins-x/jx/pkg/util" ) var ( createSpringLong = templates.LongDesc(` Creates a new Spring Boot application and then optionally sets up CI/CD pipelines and GitOps promotion. You can see a demo of this command here: [https://jenkins-x.io/demos/create_spring/](https://jenkins-x.io/demos/create_spring/) For more documentation see: [https://jenkins-x.io/developing/create-spring/](https://jenkins-x.io/developing/create-spring/)
` + opts.SeeAlsoText("jx create project")) createSpringExample = templates.Examples(` # Create a Spring Boot application where you use the terminal to pick the values jx create spring # Creates a Spring Boot application passing in the required dependencies jx create spring -d web -d actuator # To pick advanced options (such as the package type, maven-project or gradle-project), use jx create spring -x # To create a gradle project use: jx create spring --type gradle-project `) ) // CreateSpringOptions the options for the create spring command type CreateSpringOptions struct { CreateProjectOptions Advanced bool SpringForm spring.SpringBootForm } // NewCmdCreateSpring creates a command object for the "create" command func NewCmdCreateSpring(commonOpts *opts.CommonOptions) *cobra.Command { options := &CreateSpringOptions{ CreateProjectOptions: CreateProjectOptions{ ImportOptions: importcmd.ImportOptions{ CommonOptions: commonOpts, }, }, } cmd := &cobra.Command{ Use: "spring", Short: "Create a new Spring Boot application and import the generated code into Git and Jenkins for CI/CD", Long: createSpringLong, Example: createSpringExample, Run: func(cmd *cobra.Command, args []string) { options.Cmd = cmd options.Args = args err := options.Run() helper.CheckErr(err) }, } options.addCreateAppFlags(cmd) cmd.Flags().BoolVarP(&options.Advanced, "advanced", "x", false, "Advanced mode can show more detailed forms for some resource kinds like springboot") cmd.Flags().StringArrayVarP(&options.SpringForm.DependencyKinds, spring.OptionDependencyKind, "k", spring.DefaultDependencyKinds, "Default dependency kinds to choose from") cmd.Flags().StringArrayVarP(&options.SpringForm.Dependencies, spring.OptionDependency, "d", []string{}, "Spring Boot dependencies") cmd.Flags().StringVarP(&options.SpringForm.GroupId, spring.OptionGroupId, "g", "", "Group ID to generate") cmd.Flags().StringVarP(&options.SpringForm.ArtifactId, spring.OptionArtifactId, "a", "", "Artifact ID to generate") cmd.Flags().StringVarP(&options.SpringForm.Language, spring.OptionLanguage, "l", "", "Language to generate") cmd.Flags().StringVarP(&options.SpringForm.BootVersion, spring.OptionBootVersion, "t", "", "Spring Boot version") cmd.Flags().StringVarP(&options.SpringForm.JavaVersion, spring.OptionJavaVersion, "j", "", "Java version") cmd.Flags().StringVarP(&options.SpringForm.Packaging, spring.OptionPackaging, "p", "", "Packaging") cmd.Flags().StringVarP(&options.SpringForm.Type, spring.OptionType, "", "", "Project Type (such as maven-project or gradle-project)") return cmd } // Run implements the command func (o *CreateSpringOptions) Run() error { cacheDir, err := util.CacheDir() if err != nil { return err } data := &o.SpringForm var details *gits.CreateRepoData if !o.BatchMode { details, err = o.GetGitRepositoryDetails() if err != nil { return err } data.ArtifactId = details.RepoName } model, err := spring.LoadSpringBoot(cacheDir) if err != nil { return fmt.Errorf("Failed to load Spring Boot model: %s", err) } err = model.CreateSurvey(&o.SpringForm, o.Advanced, o.BatchMode) if err != nil { return err } // always add in actuator as it's required for health checking if !util.Contains(o.SpringForm.Dependencies, "actuator") { o.SpringForm.Dependencies = append(o.SpringForm.Dependencies, "actuator") } // always add web as the JVM tends to terminate if it's not added if !util.Contains(o.SpringForm.Dependencies, "web") { o.SpringForm.Dependencies = append(o.SpringForm.Dependencies, "web") } dir := o.OutDir if dir == "" { dir, err = os.Getwd()
if err != nil { return err } } outDir, err := data.CreateProject(dir) if err != nil { return err } log.Logger().Infof("Created Spring Boot project at %s", util.ColorInfo(outDir)) if details != nil { o.ConfigureImportOptions(details) } return o.ImportCreatedProject(outDir) }
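Run above repeats the same guard twice to force a dependency into the list. A tiny illustrative extraction of that pattern; ensureDependency is a hypothetical helper, not part of jx, and it reuses the util.Contains call the file already imports:

func ensureDependency(deps []string, name string) []string {
	// Append name only if it is not already present.
	if !util.Contains(deps, name) {
		deps = append(deps, name)
	}
	return deps
}

With it, the two blocks in Run would collapse to o.SpringForm.Dependencies = ensureDependency(ensureDependency(o.SpringForm.Dependencies, "actuator"), "web").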
display.rs
//! This module contains utilities for rendering syntax nodes into a string representing their signature. use crate::ast::{self, HasGenericParams, HasName}; use ast::HasVisibility; use stdx::format_to; pub fn function_declaration(node: &ast::Fn) -> String
{ let mut buf = String::new(); if let Some(vis) = node.visibility() { format_to!(buf, "{} ", vis); } if node.async_token().is_some() { format_to!(buf, "async "); } if node.const_token().is_some() { format_to!(buf, "const "); } if node.unsafe_token().is_some() { format_to!(buf, "unsafe "); } if let Some(abi) = node.abi() { // Keyword `extern` is included in the string. format_to!(buf, "{} ", abi); } if let Some(name) = node.name() { format_to!(buf, "fn {}", name); } if let Some(type_params) = node.generic_param_list() { format_to!(buf, "{}", type_params); } if let Some(param_list) = node.param_list() { let params: Vec<String> = param_list .self_param() .into_iter() .map(|self_param| self_param.to_string()) .chain(param_list.params().map(|param| param.to_string())) .collect(); // Joining keeps all parameters on a single line format_to!(buf, "({})", params.join(", ")); } if let Some(ret_type) = node.ret_type() { if ret_type.ty().is_some() { format_to!(buf, " {}", ret_type); } } if let Some(where_clause) = node.where_clause() { format_to!(buf, "\n{}", where_clause); } buf }
commands.go
package iroha // #cgo CFLAGS: -I ../../../../irohad // #cgo LDFLAGS: -Wl,-unresolved-symbols=ignore-all // #include "ametsuchi/impl/proto_command_executor.h" // #include "ametsuchi/impl/proto_specific_query_executor.h" import "C" import ( "fmt" "time" "unsafe" pb "iroha.protocol" "github.com/golang/protobuf/proto" ) var ( IrohaCommandExecutor unsafe.Pointer IrohaQueryExecutor unsafe.Pointer Caller string ) // -----------------------Iroha commands--------------------------------------- /* Transfer assets between accounts */ func TransferAsset(src, dst, asset, amount string) error { command := &pb.Command{Command: &pb.Command_TransferAsset{ TransferAsset: &pb.TransferAsset{ SrcAccountId: src, DestAccountId: dst, AssetId: asset, Description: "EVM asset transfer", Amount: amount, }}} commandResult, err := makeProtobufCmdAndExecute(IrohaCommandExecutor, command) if err != nil { return err } if commandResult.error_code != 0 { error_extra := "" error_extra_ptr := commandResult.error_extra.toStringAndRelease() if error_extra_ptr != nil { error_extra = ": " + *error_extra_ptr } return fmt.Errorf("Error executing TransferAsset command: %s", error_extra) } return nil } // -----------------------Iroha queries--------------------------------------- // Queries asset balance of an account func GetAccountAssets(accountID string) ([]*pb.AccountAsset, error) { query := &pb.Query{Payload: &pb.Query_Payload{ Meta: &pb.QueryPayloadMeta{ CreatedTime: uint64(time.Now().UnixNano() / int64(time.Millisecond)), CreatorAccountId: Caller, QueryCounter: 1}, Query: &pb.Query_Payload_GetAccountAssets{ GetAccountAssets: &pb.GetAccountAssets{AccountId: accountID}}}} queryResponse, err := makeProtobufQueryAndExecute(IrohaQueryExecutor, query) if err != nil { return []*pb.AccountAsset{}, err } switch response := queryResponse.Response.(type) { case *pb.QueryResponse_ErrorResponse: return []*pb.AccountAsset{}, fmt.Errorf( "ErrorResponse in GetIrohaAccountAssets: %d, %v", response.ErrorResponse.ErrorCode, response.ErrorResponse.Message, ) case *pb.QueryResponse_AccountAssetsResponse: accountAssetsResponse := queryResponse.GetAccountAssetsResponse() return accountAssetsResponse.AccountAssets, nil default: return []*pb.AccountAsset{}, fmt.Errorf("Wrong response type in GetIrohaAccountAssets") } } // -----------------------Helper functions--------------------------------------- // Execute Iroha command func makeProtobufCmdAndExecute(cmdExecutor unsafe.Pointer, command *pb.Command) (res *C.Iroha_CommandError, err error) { out, err := proto.Marshal(command) if err != nil
cOut := C.CBytes(out) commandResult := C.Iroha_ProtoCommandExecutorExecute(cmdExecutor, cOut, C.int(len(out)), C.CString(Caller)) return &commandResult, nil } // Perform Iroha query func makeProtobufQueryAndExecute(queryExecutor unsafe.Pointer, query *pb.Query) (res *pb.QueryResponse, err error) { out, err := proto.Marshal(query) if err != nil { return nil, err } cOut := C.CBytes(out) queryResult := C.Iroha_ProtoSpecificQueryExecutorExecute(queryExecutor, cOut, C.int(len(out))) out = C.GoBytes(queryResult.data, queryResult.size) queryResponse := &pb.QueryResponse{} err = proto.Unmarshal(out, queryResponse) if err != nil { return nil, err } return queryResponse, nil }
{ // a non-zero error_code signals failure; 100 is an arbitrary internal marker for a protobuf marshalling error return &C.Iroha_CommandError{error_code: 100}, err }
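A hedged usage sketch for the two exported calls above; it assumes the host process has already initialized IrohaCommandExecutor and IrohaQueryExecutor, the account and asset IDs are hypothetical, and the AssetId/Balance fields follow the iroha.protocol definitions:

func exampleTransferAndQuery() {
	// Move 12.34 units of coin#test between two accounts.
	if err := TransferAsset("alice@test", "bob@test", "coin#test", "12.34"); err != nil {
		fmt.Println("transfer failed:", err)
		return
	}
	// Read back the destination account's balances.
	assets, err := GetAccountAssets("bob@test")
	if err != nil {
		fmt.Println("query failed:", err)
		return
	}
	for _, asset := range assets {
		fmt.Println(asset.AssetId, asset.Balance)
	}
}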
index.js
import React from 'react'; import { Link } from 'react-router-dom'; import styles from './index.module.scss'; function NavigationItem(props) { return ( <li className={styles.item}> <Link className={styles.link} to={props.link}>{props.children}</Link> </li>
export default NavigationItem;
) }
base.go
// Copyright 2015 The go-ethereum Authors // This file is part of the go-ethereum library. // // The go-ethereum library is free software: you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // The go-ethereum library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>. package bind import ( "context" "errors" "fmt" "math/big" "github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum/accounts/abi" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/crypto" "github.com/ethereum/go-ethereum/event" ) // SignerFn is a signer function callback when a contract requires a method to // sign the transaction before submission. type SignerFn func(types.Signer, common.Address, *types.Transaction) (*types.Transaction, error) // CallOpts is the collection of options to fine tune a contract call request. type CallOpts struct { Pending bool // Whether to operate on the pending state or the last known one From common.Address // Optional: the sender address, otherwise the first account is used BlockNumber *big.Int // Optional: the block number on which the call should be performed Context context.Context // Network context to support cancellation and timeouts (nil = no timeout) } // TransactOpts is the collection of authorization data required to create a // valid Ethereum transaction. type TransactOpts struct { From common.Address // Ethereum account to send the transaction from Nonce *big.Int // Nonce to use for the transaction execution (nil = use pending state) Signer SignerFn // Method to use for signing the transaction (mandatory) Value *big.Int // Funds to transfer along with the transaction (nil = 0 = no funds) GasPrice *big.Int // Gas price to use for the transaction execution (nil = gas price oracle) GasLimit uint64 // Gas limit to set for the transaction execution (0 = estimate) Context context.Context // Network context to support cancellation and timeouts (nil = no timeout) } // FilterOpts is the collection of options to fine tune filtering for events // within a bound contract. type FilterOpts struct { Start uint64 // Start of the queried range End *uint64 // End of the range (nil = latest) Context context.Context // Network context to support cancellation and timeouts (nil = no timeout) } // WatchOpts is the collection of options to fine tune subscribing for events // within a bound contract. type WatchOpts struct { Start *uint64 // Start of the queried range (nil = latest) Context context.Context // Network context to support cancellation and timeouts (nil = no timeout) } // BoundContract is the base wrapper object that reflects a contract on the // Ethereum network. It contains a collection of methods that are used by the // higher level contract bindings to operate.
type BoundContract struct { address common.Address // Deployment address of the contract on the Ethereum blockchain abi abi.ABI // Reflect based ABI to access the correct Ethereum methods caller ContractCaller // Read interface to interact with the blockchain transactor ContractTransactor // Write interface to interact with the blockchain filterer ContractFilterer // Event filtering to interact with the blockchain } // NewBoundContract creates a low level contract interface through which calls // and transactions may be made. func NewBoundContract(address common.Address, abi abi.ABI, caller ContractCaller, transactor ContractTransactor, filterer ContractFilterer) *BoundContract { return &BoundContract{ address: address, abi: abi, caller: caller, transactor: transactor, filterer: filterer, } } // DeployContract deploys a contract onto the Ethereum blockchain and binds the // deployment address with a Go wrapper. func DeployContract(opts *TransactOpts, abi abi.ABI, bytecode []byte, backend ContractBackend, params ...interface{}) (common.Address, *types.Transaction, *BoundContract, error) { // Try to deploy the contract c := NewBoundContract(common.Address{}, abi, backend, backend, backend) input, err := c.abi.Pack("", params...) if err != nil { return common.Address{}, nil, nil, err } tx, err := c.transact(opts, nil, append(bytecode, input...)) if err != nil { return common.Address{}, nil, nil, err } c.address = crypto.CreateAddress(opts.From, tx.Nonce()) return c.address, tx, c, nil } // Call invokes the (constant) contract method with params as input values and // sets the output to result. The result type might be a single field for simple // returns, a slice of interfaces for anonymous returns and a struct for named // returns. func (c *BoundContract) Call(opts *CallOpts, result interface{}, method string, params ...interface{}) error { // Don't crash on a lazy user if opts == nil { opts = new(CallOpts) } // Pack the input, call and unpack the results input, err := c.abi.Pack(method, params...) if err != nil { return err } var ( msg = ethereum.CallMsg{From: opts.From, To: &c.address, Data: input} ctx = ensureContext(opts.Context) code []byte output []byte ) if opts.Pending { pb, ok := c.caller.(PendingContractCaller) if !ok { return ErrNoPendingState } output, err = pb.PendingCallContract(ctx, msg) if err == nil && len(output) == 0 { // Make sure we have a contract to operate on, and bail out otherwise. if code, err = pb.PendingCodeAt(ctx, c.address); err != nil { return err } else if len(code) == 0 { return ErrNoCode } } } else { output, err = c.caller.CallContract(ctx, msg, opts.BlockNumber) if err == nil && len(output) == 0 { // Make sure we have a contract to operate on, and bail out otherwise. if code, err = c.caller.CodeAt(ctx, c.address, opts.BlockNumber); err != nil { return err } else if len(code) == 0
} } if err != nil { return err } return c.abi.Unpack(result, method, output) } // Transact invokes the (paid) contract method with params as input values. func (c *BoundContract) Transact(opts *TransactOpts, method string, params ...interface{}) (*types.Transaction, error) { // Otherwise pack up the parameters and invoke the contract input, err := c.abi.Pack(method, params...) if err != nil { return nil, err } return c.transact(opts, &c.address, input) } // Transfer initiates a plain transaction to move funds to the contract, calling // its default method if one is available. func (c *BoundContract) Transfer(opts *TransactOpts) (*types.Transaction, error) { return c.transact(opts, &c.address, nil) } // transact executes an actual transaction invocation, first deriving any missing // authorization fields, and then scheduling the transaction for execution. func (c *BoundContract) transact(opts *TransactOpts, contract *common.Address, input []byte) (*types.Transaction, error) { var err error // Ensure a valid value field and resolve the account nonce value := opts.Value if value == nil { value = new(big.Int) } var nonce uint64 if opts.Nonce == nil { nonce, err = c.transactor.PendingNonceAt(ensureContext(opts.Context), opts.From) if err != nil { return nil, fmt.Errorf("failed to retrieve account nonce: %v", err) } } else { nonce = opts.Nonce.Uint64() } // Figure out the gas allowance and gas price values gasPrice := opts.GasPrice if gasPrice == nil { gasPrice, err = c.transactor.SuggestGasPrice(ensureContext(opts.Context)) if err != nil { return nil, fmt.Errorf("failed to suggest gas price: %v", err) } } gasLimit := opts.GasLimit if gasLimit == 0 { // Gas estimation cannot succeed without code for method invocations if contract != nil { if code, err := c.transactor.PendingCodeAt(ensureContext(opts.Context), c.address); err != nil { return nil, err } else if len(code) == 0 { return nil, ErrNoCode } } // If the contract surely has code (or code is not needed), estimate the transaction msg := ethereum.CallMsg{From: opts.From, To: contract, Value: value, Data: input} gasLimit, err = c.transactor.EstimateGas(ensureContext(opts.Context), msg) if err != nil { return nil, fmt.Errorf("failed to estimate gas needed: %v", err) } } // Create the transaction, sign it and schedule it for execution var rawTx *types.Transaction if contract == nil { rawTx = types.NewContractCreation(nonce, value, gasLimit, gasPrice, input) } else { rawTx = types.NewTransaction(nonce, c.address, value, gasLimit, gasPrice, input) } if opts.Signer == nil { return nil, errors.New("no signer to authorize the transaction with") } signedTx, err := opts.Signer(types.HomesteadSigner{}, opts.From, rawTx) if err != nil { return nil, err } if err := c.transactor.SendTransaction(ensureContext(opts.Context), signedTx); err != nil { return nil, err } return signedTx, nil } // FilterLogs filters contract logs for past blocks, returning the necessary // channels to construct a strongly typed bound iterator on top of them. func (c *BoundContract) FilterLogs(opts *FilterOpts, name string, query ...[]interface{}) (chan types.Log, event.Subscription, error) { // Don't crash on a lazy user if opts == nil { opts = new(FilterOpts) } // Append the event selector to the query parameters and construct the topic set query = append([][]interface{}{{c.abi.Events[name].Id()}}, query...) topics, err := makeTopics(query...) 
if err != nil { return nil, nil, err } // Start the background filtering logs := make(chan types.Log, 128) config := ethereum.FilterQuery{ Addresses: []common.Address{c.address}, Topics: topics, FromBlock: new(big.Int).SetUint64(opts.Start), } if opts.End != nil { config.ToBlock = new(big.Int).SetUint64(*opts.End) } /* TODO(karalabe): Replace the rest of the method below with this when supported sub, err := c.filterer.SubscribeFilterLogs(ensureContext(opts.Context), config, logs) */ buff, err := c.filterer.FilterLogs(ensureContext(opts.Context), config) if err != nil { return nil, nil, err } sub, err := event.NewSubscription(func(quit <-chan struct{}) error { for _, log := range buff { select { case logs <- log: case <-quit: return nil } } return nil }), nil if err != nil { return nil, nil, err } return logs, sub, nil } // WatchLogs subscribes to contract logs for future blocks, returning a // subscription object that can be used to tear down the watcher. func (c *BoundContract) WatchLogs(opts *WatchOpts, name string, query ...[]interface{}) (chan types.Log, event.Subscription, error) { // Don't crash on a lazy user if opts == nil { opts = new(WatchOpts) } // Append the event selector to the query parameters and construct the topic set query = append([][]interface{}{{c.abi.Events[name].Id()}}, query...) topics, err := makeTopics(query...) if err != nil { return nil, nil, err } // Start the background filtering logs := make(chan types.Log, 128) config := ethereum.FilterQuery{ Addresses: []common.Address{c.address}, Topics: topics, } if opts.Start != nil { config.FromBlock = new(big.Int).SetUint64(*opts.Start) } sub, err := c.filterer.SubscribeFilterLogs(ensureContext(opts.Context), config, logs) if err != nil { return nil, nil, err } return logs, sub, nil } // UnpackLog unpacks a retrieved log into the provided output structure. func (c *BoundContract) UnpackLog(out interface{}, event string, log types.Log) error { if len(log.Data) > 0 { if err := c.abi.Unpack(out, event, log.Data); err != nil { return err } } var indexed abi.Arguments for _, arg := range c.abi.Events[event].Inputs { if arg.Indexed { indexed = append(indexed, arg) } } return parseTopics(out, indexed, log.Topics[1:]) } // UnpackLogIntoMap unpacks a retrieved log into the provided map. func (c *BoundContract) UnpackLogIntoMap(out map[string]interface{}, event string, log types.Log) error { if len(log.Data) > 0 { if err := c.abi.UnpackIntoMap(out, event, log.Data); err != nil { return err } } var indexed abi.Arguments for _, arg := range c.abi.Events[event].Inputs { if arg.Indexed { indexed = append(indexed, arg) } } return parseTopicsIntoMap(out, indexed, log.Topics[1:]) } // ensureContext is a helper method to ensure a context is not nil, even if the // user specified it as such. func ensureContext(ctx context.Context) context.Context { if ctx == nil { return context.TODO() } return ctx }
{ return ErrNoCode }
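A sketch of how generated bindings typically drive the Call path above, written as if inside package bind for brevity; the method name "balanceOf" and the contract value are placeholders, not part of this file:

func exampleBalance(contract *BoundContract, holder common.Address) (*big.Int, error) {
	var balance *big.Int
	// An empty CallOpts reads the latest state; set Pending or BlockNumber to change that.
	// Call unpacks the single return value into the pointer we pass.
	err := contract.Call(&CallOpts{}, &balance, "balanceOf", holder)
	return balance, err
}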
speedtest_config.rs
use crate::{distance::EarthLocation, error::SpeedTestError}; use std::{net::Ipv4Addr, time::Duration}; pub struct SpeedTestClientConfig { pub ip: Ipv4Addr, pub isp: String, } impl Default for SpeedTestClientConfig { fn default() -> Self { SpeedTestClientConfig { ip: Ipv4Addr::new(127, 0, 0, 1), isp: String::default(), } } } #[derive(Default)] pub struct
{ pub upload: Vec<usize>, pub download: Vec<usize>, } #[derive(Default)] pub struct SpeedTestCountsConfig { pub upload: usize, pub download: usize, } #[derive(Default)] pub struct SpeedTestThreadsConfig { pub upload: usize, pub download: usize, } pub struct SpeedTestLengthConfig { pub upload: Duration, pub download: Duration, } impl Default for SpeedTestLengthConfig { fn default() -> Self { SpeedTestLengthConfig { upload: Duration::from_secs(10), download: Duration::from_secs(10), } } } #[derive(Default)] pub struct SpeedTestConfig { pub client: SpeedTestClientConfig, pub ignore_servers: Vec<u32>, pub sizes: SpeedTestSizeConfig, pub counts: SpeedTestCountsConfig, pub threads: SpeedTestThreadsConfig, pub length: SpeedTestLengthConfig, pub upload_max: usize, pub location: EarthLocation, } impl SpeedTestConfig { pub fn parse(config_xml: &str) -> Result<SpeedTestConfig, SpeedTestError> { let document = roxmltree::Document::parse(config_xml)?; let server_config_node = document .descendants() .find(|n| n.has_tag_name("server-config")) .ok_or(SpeedTestError::ConfigParseError)?; let download_node = document .descendants() .find(|n| n.has_tag_name("download")) .ok_or(SpeedTestError::ConfigParseError)?; let upload_node = document .descendants() .find(|n| n.has_tag_name("upload")) .ok_or(SpeedTestError::ConfigParseError)?; let client_node = document .descendants() .find(|n| n.has_tag_name("client")) .ok_or(SpeedTestError::ConfigParseError)?; let ignore_servers: Vec<u32> = server_config_node .attribute("ignoreids") .ok_or(SpeedTestError::ConfigParseError)? .split(',') .filter(|s| !s.is_empty()) .map(|s| s.parse::<u32>()) .collect::<Result<Vec<u32>, _>>()?; let ratio = upload_node .attribute("ratio") .ok_or(SpeedTestError::ConfigParseError)? .parse::<usize>()?; let upload_max = upload_node .attribute("maxchunkcount") .ok_or(SpeedTestError::ConfigParseError)? .parse::<usize>()?; let up_sizes = [32768usize, 65536, 131072, 262144, 524288, 1048576, 7340032]; let sizes = SpeedTestSizeConfig { upload: up_sizes .get(ratio - 1..) .ok_or(SpeedTestError::ConfigParseError)? .to_vec(), download: vec![350usize, 500, 750, 1000, 1500, 2000, 2500, 3000, 3500, 4000], }; let size_count = sizes.upload.len(); let upload_count = (upload_max as f32 / size_count as f32).ceil() as usize; let counts = SpeedTestCountsConfig { upload: upload_count, download: download_node .attribute("threadsperurl") .ok_or(SpeedTestError::ConfigParseError)? .parse::<usize>()?, }; let threads = SpeedTestThreadsConfig { upload: upload_node .attribute("threads") .ok_or(SpeedTestError::ConfigParseError)? .parse::<usize>()?, download: server_config_node .attribute("threadcount") .ok_or(SpeedTestError::ConfigParseError)? .parse::<usize>()? * 2, }; let length = SpeedTestLengthConfig { upload: upload_node .attribute("testlength") .ok_or(SpeedTestError::ConfigParseError)? .parse::<u64>() .map(Duration::from_secs)?, download: download_node .attribute("testlength") .ok_or(SpeedTestError::ConfigParseError)? .parse::<u64>() .map(Duration::from_secs)?, }; let client = SpeedTestClientConfig { ip: client_node .attribute("ip") .ok_or(SpeedTestError::ConfigParseError)? .parse()?, isp: client_node .attribute("isp") .ok_or(SpeedTestError::ConfigParseError)? .to_string(), }; Ok(SpeedTestConfig { client, ignore_servers, sizes, counts, threads, length, upload_max, location: EarthLocation { latitude: client_node .attribute("lat") .ok_or(SpeedTestError::ConfigParseError)? .parse()?, longitude: client_node .attribute("lon") .ok_or(SpeedTestError::ConfigParseError)? 
.parse()?, }, }) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_parse_config_xml() { let config = SpeedTestConfig::parse(include_str!("../tests/config/config.php.xml")).unwrap(); assert_eq!("174.79.12.26", config.client.ip.to_string()); assert_eq!( EarthLocation { latitude: 32.9954, longitude: -117.0753, }, config.location ); assert_eq!("Cox Communications", config.client.isp); } #[test] fn test_parse_config_xml_83() { let config = SpeedTestConfig::parse(include_str!("../tests/config/2021-07-speedtest-config.xml")) .unwrap(); assert_eq!("Cox Communications", config.client.isp); } }
SpeedTestSizeConfig
interfaces.go
package apptools
"time" ) type App interface { Init(Registry) } type Registry interface { Init() GetConfig() Config GetDispatchHandler() DispatchHandler GetAssetRegistry() AssetRegistry GetAssetTestRegistry() AssetRegistry } type DispatchHandler interface { Attach(func(func(w http.ResponseWriter, r *http.Request)) func(w http.ResponseWriter, r *http.Request)) AddTeamObject(string, bool) } type AssetRegistry interface { Add(string, Asset) AddAssets(map[string][]Asset) AddTemplates(map[string]Asset, map[string]Asset) } type Asset struct { Url string GetModTime func() time.Time GetReader func() io.Reader } type Config interface { Get(key string) string }
import ( "io" "net/http"
tcp.go
package main import ( "encoding/gob" "fmt" "net" ) func server() { // listen on a port ln, err := net.Listen("tcp", ":9999") if err != nil { fmt.Println(err)
c, err := ln.Accept() if err != nil { fmt.Println(err) continue } // handle the connection go handleServerConnection(c) } } func handleServerConnection(c net.Conn) { // receive the message var msg string err := gob.NewDecoder(c).Decode(&msg) if err != nil { fmt.Println(err) } else { fmt.Println("Received", msg) } c.Close() } func client() { // connect to the server c, err := net.Dial("tcp", "127.0.0.1:9999") if err != nil { fmt.Println(err) return } // send the message msg := "Hello, World" fmt.Println("Sending", msg) err = gob.NewEncoder(c).Encode(msg) if err != nil { fmt.Println(err) } c.Close() } func main() { go server() go client() var input string fmt.Scanln(&input) }
		return
	}
	for {
		// accept a connection
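// --- Illustrative addition (editor's sketch, not part of the original ---
// --- tcp.go). The server and client goroutines in main() race: the    ---
// --- client may Dial before Listen has completed. One common remedy,  ---
// --- retrying the dial a few times, is sketched below; it assumes a   ---
// --- "time" import is added to the file.                              ---

func clientWithRetry() {
	var c net.Conn
	var err error
	for i := 0; i < 10; i++ {
		c, err = net.Dial("tcp", "127.0.0.1:9999")
		if err == nil {
			break
		}
		time.Sleep(100 * time.Millisecond) // give the listener time to come up
	}
	if err != nil {
		fmt.Println(err)
		return
	}
	defer c.Close()
	// send the message, exactly as client() does
	if err := gob.NewEncoder(c).Encode("Hello, World"); err != nil {
		fmt.Println(err)
	}
}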
database-update.ts
import { URL } from "url";
import * as clc from "cli-color";
import * as fs from "fs";

import { Client } from "../apiv2";
import { Command } from "../command";
import { Emulators } from "../emulator/types";
import { FirebaseError } from "../error";
import { populateInstanceDetails } from "../management/database";
import { printNoticeIfEmulated } from "../emulator/commandUtils";
import { promptOnce } from "../prompt";
import { realtimeOriginOrEmulatorOrCustomUrl } from "../database/api";
import { requirePermissions } from "../requirePermissions";
import * as logger from "../logger";
import { requireDatabaseInstance } from "../requireDatabaseInstance";
import * as utils from "../utils";

export default new Command("database:update <path> [infile]")
  .description("update some of the keys for the defined path in your Firebase")
  .option("-d, --data <data>", "specify escaped JSON directly")
  .option("-y, --confirm", "pass this option to bypass confirmation prompt")
  )
  .before(requirePermissions, ["firebasedatabase.instances.update"])
  .before(requireDatabaseInstance)
  .before(populateInstanceDetails)
  .before(printNoticeIfEmulated, Emulators.DATABASE)
  .action(async (path: string, infile: string | undefined, options) => {
    if (!path.startsWith("/")) {
      throw new FirebaseError("Path must begin with /");
    }
    const origin = realtimeOriginOrEmulatorOrCustomUrl(options.instanceDetails.databaseUrl);
    const url = utils.getDatabaseUrl(origin, options.instance, path);
    if (!options.confirm) {
      const confirmed = await promptOnce({
        type: "confirm",
        name: "confirm",
        default: false,
        message: `You are about to modify data at ${clc.cyan(url)}. Are you sure?`,
      });
      if (!confirmed) {
        throw new FirebaseError("Command aborted.");
      }
    }
    const inStream =
      utils.stringToStream(options.data) ||
      (infile && fs.createReadStream(infile)) ||
      process.stdin;
    const jsonUrl = new URL(utils.getDatabaseUrl(origin, options.instance, path + ".json"));
    if (!infile && !options.data) {
      utils.explainStdin();
    }
    const c = new Client({ urlPrefix: jsonUrl.origin, auth: true });
    try {
      await c.request({
        method: "PATCH",
        path: jsonUrl.pathname,
        body: inStream,
      });
    } catch (err) {
      throw new FirebaseError("Unexpected error while updating data");
    }
    utils.logSuccess("Data updated successfully");
    logger.info();
    logger.info(
      clc.bold("View data at:"),
      utils.getDatabaseViewDataUrl(origin, options.project, options.instance, path)
    );
  });
  .option(
    "--instance <instance>",
    "use the database <instance>.firebaseio.com (if omitted, use default database instance)"
my429_qcomponents.py
# -*- coding: utf-8 -*-

# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.

from qiskit_metal import draw, Dict
from qiskit_metal.qlibrary.core import QComponent


class MyQComponent1(QComponent):
    """Demonstration1 - Straight segment with variable width/length"""

    ### def __init__() <- comes from QComponent
    ### Initializes base variables such as self.id, self.name and self.options
    ### Also launches the first execution of make()

    ### def rebuild() <- comes from QComponent
    ### Clear output from previous runs of make() (geom/pin/net) and re-runs it

    def make(self):
        """calculates the geometries of the QComponent"""
        rect = draw.rectangle(0.5, 0.1, 0, 0)  #width, height, pos_x, pos_y
        # add_qgeometry() expects shapely geometry, thus the use of the draw module above
        self.add_qgeometry('poly', {'my_polygon': rect}, layer=1, subtract=False)
        self.add_pin('in', rect.exterior.coords[:-3:-1], 0.1)  #name, tangent, width


class MyQComponent2(QComponent):
    """Demonstration2 - Straight segment with variable width/length"""

    # Your knobs to modify the cell behavior
    default_options = Dict(width='0.5mm',
                           height='0.1mm',
                           pos_x='0mm',
                           pos_y='0mm',
                           layer='1')
    """Default drawing options"""

    def make(self):
        """calculates the geometries of the QComponent"""
        p = self.parse_options()  # short-handle alias for the options interpreter
        rect = draw.rectangle(p.width, p.height, p.pos_x, p.pos_y)
        self.add_qgeometry('poly', {'my_polygon': rect},
                           layer=p.layer,
                           subtract=False)
        self.add_pin('in', rect.exterior.coords[:-3:-1], p.height)


class MyQComponent3(QComponent):
    """Demonstration3 - Straight segment with variable width/length"""

    default_options = Dict(width='0.5mm',
                           height='0.1mm',
                           pos_x='0mm',
                           pos_y='0mm',
                           layer='1')
    """Default drawing options"""

    # Name prefix of component + import of renderer-specific default_options
    component_metadata = Dict(
        short_name='Trace',
        _qgeometry_table_path='False',  #wirebonds
        _qgeometry_table_poly='True',
        _qgeometry_table_junction='False')  #gds imports and analysis inputs
    """Component metadata"""

    def
(self):
        """calculates the geometries of the QComponent"""
        p = self.parse_options()  # short-handle alias. Options interpreter
        rect = draw.rectangle(p.width, p.height, p.pos_x, p.pos_y)
        self.add_qgeometry('poly', {'my_polygon': rect},
                           layer=p.layer,
                           subtract=False)
        self.add_pin('in', rect.exterior.coords[:-3:-1], p.height)


class MyQComponent4(QComponent):
    """Demonstration4 - Straight segment with variable width/length"""

    default_options = Dict(width='0.5mm',
                           height='0.1mm',
                           gap='0.02mm',
                           pos_x='0mm',
                           pos_y='0mm',
                           layer='1')
    """Default drawing options"""

    # Name prefix of component + import of renderer-specific default_options
    component_metadata = Dict(
        short_name='Trace',
        _qgeometry_table_path='True',  #wirebonds
        _qgeometry_table_poly='False',
        _qgeometry_table_junction='False')  #gds
    """Component metadata"""

    def make(self):
        """calculates the geometries of the QComponent"""
        p = self.parse_options()

        line = draw.LineString([(-p.width / 2, 0), (p.width / 2, 0)])
        line = draw.translate(line, p.pos_x, p.pos_y)
        self.add_qgeometry('path', {'trace': line},
                           width=p.height,
                           layer=p.layer,
                           subtract=False)

        line2 = draw.LineString([((-p.width / 2) - 2 * p.gap, 0),
                                 ((p.width / 2) + 2 * p.gap, 0)])
        line2 = draw.translate(line2, p.pos_x, p.pos_y)
        self.add_qgeometry('path', {'cut': line2},
                           width=p.height + 2 * p.gap,
                           layer=p.layer,
                           subtract=True)

        self.add_pin('in', line.coords[::-1], p.height, input_as_norm=True)
make
reverseproxyplugs.go
// This is a deprecated version
//
// Use instead github.com/IBM/go-security-plugs/rtplugs
package reverseproxyplugs

import (
	"errors"
	"net/http"
	"plugin"
	"time"

	pi "github.com/IBM/go-security-plugs/historical/pluginterfaces"
)

var reverseProxyPlugs []pi.ReverseProxyPlug

func init() {
	initialize()
}

func initialize() {
	reverseProxyPlugs = []pi.ReverseProxyPlug{}
}

// This is a deprecated version
//
// Use instead github.com/IBM/go-security-plugs/rtplugs
func LoadPlugs(plugs []string) (ret int) {
	defer func() {
		if r := recover(); r != nil {
			pi.Log.Warnf("Recovered from panic during LoadPlugs!\n\tOne or more plugs may be skipped\n\tRecover: %v", r)
		}
	}()

	ret = len(reverseProxyPlugs)
	for _, plugPkgPath := range plugs {
		plugPkg, err := plugin.Open(plugPkgPath)
		if err != nil {
			pi.Log.Infof("Plugin %s skipped - Failed to open plugin. Err: %v", plugPkgPath, err)
			continue
		}
		if plugSymbol, err := plugPkg.Lookup("Plug"); err == nil {
			switch valType := plugSymbol.(type) {
			case pi.ReverseProxyPlug:
				p := plugSymbol.(pi.ReverseProxyPlug)
				p.Initialize()
				reverseProxyPlugs = append(reverseProxyPlugs, p)
				pi.Log.Infof("Plug %s (%s) was successfully loaded", p.PlugName(), p.PlugVersion())
				ret++
			default:
				pi.Log.Infof("Plugin %s skipped - Plug symbol is of illegal type %T, %v", plugPkgPath, plugSymbol, valType)
			}
		} else {
			pi.Log.Infof("Can't find Plug symbol in plug: %s: %v", plugPkgPath, err)
			continue
		}
	}
	return
}

func handleRequest(h http.Handler, p pi.ReverseProxyPlug) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		defer func() {
			if r := recover(); r != nil {
				pi.Log.Infof("Recovered from panic during handleRequest!\n")
				//pi.Log.Warnf("Recovered from panic during handleRequest!\n\tOne or more plugs may be skipped\n\tRecover: %v", r)
			}
		}()
		start := time.Now()
		pi.Log.Debugf("Plug %s %s RequestHook", p.PlugName(), p.PlugVersion())
		e := p.RequestHook(w, r)
		elapsed := time.Since(start)
		pi.Log.Debugf("Request-Plug %s took %s", p.PlugName(), elapsed.String())
		if e == nil {
			h.ServeHTTP(w, r)
		} else {
			pi.Log.Infof("Request-Plug returned an error %v", e)
			w.WriteHeader(http.StatusForbidden)
		}
	})
}

// This is a deprecated version
//
// Use instead github.com/IBM/go-security-plugs/rtplugs
func HandleRequestPlugs(h http.Handler) http.Handler {
	for _, p := range reverseProxyPlugs {
		h = handleRequest(h, p)
	}
	return h
}

// This is a deprecated version
//
// Use instead github.com/IBM/go-security-plugs/rtplugs
func HandleResponsePlugs(resp *http.Response) (e error) {
	e = nil
	defer func() {
		if r := recover(); r != nil {
			e = errors.New("plug panicked")
			pi.Log.Warnf("Recovered from panic during HandleResponsePlugs!\n\tOne or more plugs may be skipped\n\tRecover: %v", r)
		}
	}()
	for _, p := range reverseProxyPlugs {
		start := time.Now()
		elapsed := time.Since(start)
		pi.Log.Debugf("Response-Plug %s took %s", p.PlugName(), elapsed.String())
		if e != nil {
			pi.Log.Infof("Response-Plug returned an error %v", e)
			break
		}
	}
	return
}

// This is a deprecated version
//
// Use instead github.com/IBM/go-security-plugs/rtplugs
func HandleErrorPlugs(w http.ResponseWriter, r *http.Request, e error) {
	defer func() {
		if r := recover(); r != nil {
			pi.Log.Warnf("Recovered from panic during HandleErrorPlugs!\n\tOne or more plugs may be skipped\n\tRecover: %v", r)
		}
	}()
	pi.Log.Infof("Error-Plug received an error %v", e)
	for _, p := range reverseProxyPlugs {
		start := time.Now()
		p.ErrorHook(w, r, e)
		elapsed := time.Since(start)
		pi.Log.Infof("Error-Plug %s took %s", p.PlugName(), elapsed.String())
	}
	w.WriteHeader(http.StatusForbidden)
}

// This is a deprecated version
//
// Use instead github.com/IBM/go-security-plugs/rtplugs
func UnloadPlugs() {
	defer func() {
		if r := recover(); r != nil {
			pi.Log.Warnf("Recovered from panic during UnloadPlugs!\n\tOne or more plugs may be skipped\n\tRecover: %v", r)
		}
		initialize()
	}()
	for _, p := range reverseProxyPlugs {
		p.Shutdown()
	}
}
		pi.Log.Debugf("Plug ResponseHook: %v", p.PlugName())
		e = p.ResponseHook(resp)
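// --- Illustrative sketch of a loadable plug (editor's addition, a      ---
// --- separate file, not part of reverseproxyplugs.go). Method names    ---
// --- and signatures are inferred from how this package calls them; the ---
// --- real pi.ReverseProxyPlug interface may require more, so treat     ---
// --- this as an assumption-laden outline.                              ---
// --- Build with: go build -buildmode=plugin -o noop.so                 ---
package main

import "net/http"

type noopPlug struct{}

func (noopPlug) Initialize()         {}
func (noopPlug) Shutdown()           {}
func (noopPlug) PlugName() string    { return "noop" }
func (noopPlug) PlugVersion() string { return "0.0.1" }

func (noopPlug) RequestHook(http.ResponseWriter, *http.Request) error { return nil }
func (noopPlug) ResponseHook(*http.Response) error                    { return nil }
func (noopPlug) ErrorHook(http.ResponseWriter, *http.Request, error)  {}

// Plug is the symbol LoadPlugs looks up. plugin.Lookup returns a pointer to a
// package-level variable, so Plug is declared as the concrete type: *noopPlug
// still satisfies the interface through its value-receiver methods.
var Plug noopPlug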
IconAod.tsx
import { forwardRef, Icon, IconProps } from '@queelag/react-core' import React, { ForwardedRef } from 'react' /** * Usage: * * ```typescript * import { IconAod } from '@queelag/react-material-icons' * * function App() { * return <IconAod /> * } * ``` * * @category Component
<Icon {...props} fill={typeof props.fill === 'string' ? props.fill : props.fill !== false} ref={ref} size={props.size || 16} src={`<svg xmlns="http://www.w3.org/2000/svg" enable-background="new 0 0 24 24" height="24" viewBox="0 0 24 24" width="24"><g><path d="M0,0h24v24H0V0z" fill="none"/></g><g><g><path d="M17,1.01L7,1C5.9,1,5,1.9,5,3v18c0,1.1,0.9,2,2,2h10c1.1,0,2-0.9,2-2V3C19,1.9,18.1,1.01,17,1.01z M17,18H7V6h10V18z M8.75,10h6.5c0.41,0,0.75,0.34,0.75,0.75v0c0,0.41-0.34,0.75-0.75,0.75h-6.5C8.34,11.5,8,11.16,8,10.75v0 C8,10.34,8.34,10,8.75,10z M9.75,13h4.5c0.41,0,0.75,0.34,0.75,0.75v0c0,0.41-0.34,0.75-0.75,0.75h-4.5C9.34,14.5,9,14.16,9,13.75 v0C9,13.34,9.34,13,9.75,13z"/></g></g></svg>`} /> ) })
*/ export const IconAod = forwardRef((props: IconProps, ref: ForwardedRef<SVGSVGElement>) => { return (
classgraphics_1_1polygon.js
var classgraphics_1_1polygon = [ [ "encode", "classgraphics_1_1polygon.html#accef210e1de0bbdbe333ac2e19e4d951", null ]
];
AtmsAnswers.js
const path = require('path');
const AnswerEntity = require(__dirname + '/../AnswerEntity');
const TelenorAPIClient = require(__dirname + '/../../TelenorAPIClient');
const FacebookButtons = require(path.join(__dirname, '..', '..', 'FacebookMessage', 'FacebookButtons'));
const FacebookMessageAPI = require(path.join(__dirname, '..', '..', 'FacebookMessage', 'FacebookMessageAPI'));

const UrlButton = FacebookButtons.FacebookUrlButton;
const WebHookButton = FacebookButtons.FacebookWebHookButton;

class
extends AnswerEntity {
    constructor() {
        super('atm');

        this.telenorApi = new TelenorAPIClient();
    }

    factory(recipientId, entities, callback) {
        if (typeof callback !== 'function') {
            return null;
        }

        let cities = (entities instanceof Array ? entities : []).filter((entity) => {
            return 'cities' === entity.entity;
        });

        // Tell client to choose city
        if (0 === cities.length) {
            callback(this.chooseAtmAnswer(recipientId));
        } else if (1 === cities.length) {
            this.getAtmInCity(recipientId, cities[0].value, callback);
        }
    }

    /**
     * Generates a message that explains to the user how to search.
     *
     * @param {Number} recipientId
     * @return {{recipient: {id: String}, message: {text: String}}}
     */
    chooseAtmAnswer(recipientId) {
        return FacebookMessageAPI.getButtonMessageData(
            recipientId,
            'Please press the city you want; if it is not listed, write it as a message.\ne.g. ATMs in Novi Sad.',
            [
                new WebHookButton('Beograd', {type: 'atm', entities: [{entity: 'cities', value: 'Beograd'}]}),
                new WebHookButton('Novi Sad', {type: 'atm', entities: [{entity: 'cities', value: 'Novi Sad'}]}),
                new WebHookButton('Subotica', {type: 'atm', entities: [{entity: 'cities', value: 'Subotica'}]}),
            ]
        );
    }

    getAtmInCity(recipientId, cityName, callback) {
        this.telenorApi.getATMs((error, data) => {
            if (error) {
                console.log(error);
                callback(FacebookMessageAPI.getTextMessageData(recipientId, 'Our service is currently offline.'));
                return;
            }

            let matchRegex = new RegExp(cityName, 'i');
            let atms = [];

            (data && data.data instanceof Array ? data.data : []).map((atm) => {
                let name = atm.atributes.city;
                if (name.match(matchRegex) && atms.length <= 10) {
                    atms.push((atm.atributes.address + ', ' + atm.atributes.postCode).replace('<br>', ''));
                }
            });

            callback(
                FacebookMessageAPI.getTextMessageData(recipientId, atms.join("\n"))
            );
        });
    }
}

module.exports = AtmsAnswers;
AtmsAnswers
KeyExpression.d.ts
export declare class KeyExpression { static parse(input: string): KeyExpression; readonly ctrl: boolean; readonly alt: boolean; readonly shift: boolean;
readonly key: number; readonly exclusive: boolean; private constructor(); matches(keyData: KeyExpression | KeyboardEvent): boolean; }
driver.go
package filesystem

import (
	"fmt"
	"net/url"
)

var (
	errNotSupported = "not supported driver `%s`"
)

var drivers = make(map[string]Driver)

// RegisterDriver registers a FileSystem driver for the given scheme.
func RegisterDriver(driver string, d Driver) {
	drivers[driver] = d
}

type Driver interface {
	Open(uri *url.URL) (FileSystem, error)
}

// Open resolves a driver by scheme and returns a FileSystem and error
// use uri.Scheme to get driver
// use uri.User to get auth
// use uri.Host to get host
// use uri.Path to get root path
// use uri.RawQuery to get setting
// e.g. os:///tmp/filesystem?a=1&b=2
//      memory:///?maxsize=10240000
//      s3://user:pass@host/bucket?region=us-east-1
//      ftp://user:pass@host/path?passive=true
//      sftp://user:pass@host/path?passive=true
func Open(dns string) (FileSystem, error)
{
	uri, err := url.Parse(dns)
	if err != nil {
		return nil, err
	}
	if f, ok := drivers[uri.Scheme]; ok {
		return f.Open(uri)
	}
	return nil, fmt.Errorf(errNotSupported, uri.Scheme)
}
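// --- Illustrative sketch (editor's addition, not part of the original ---
// --- driver.go). It shows how a backend could hook into the registry  ---
// --- above, assuming it lives in the same package. memoryDriver and   ---
// --- its settings handling are placeholders; a real driver would      ---
// --- construct and return its FileSystem implementation.              ---

type memoryDriver struct{}

func (memoryDriver) Open(uri *url.URL) (FileSystem, error) {
	// uri.RawQuery carries driver settings, per the Open documentation above.
	_ = uri.Query().Get("maxsize") // e.g. memory:///?maxsize=10240000
	return nil, nil                // placeholder: return the concrete FileSystem here
}

func init() {
	RegisterDriver("memory", memoryDriver{})
}

// Callers would then resolve a FileSystem from a DSN:
//
//	fs, err := Open("memory:///?maxsize=10240000")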
package.spec.ts
* * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v. 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0. * * This Source Code may also be made available under the following Secondary * Licenses when the conditions for such availability set forth in the Eclipse * Public License v. 2.0 are satisfied: GNU General Public License, version 2 * with the GNU Classpath Exception which is available at * https://www.gnu.org/software/classpath/license.html. * * SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 ********************************************************************************/ /* note: this bogus test file is required so that we are able to run mocha unit tests on this package, without having any actual unit tests in it. This way a coverage report will be generated, showing 0% coverage, instead of no report. This file can be removed once we have real unit tests in place. */ describe('outline view package', () => { it('should support code coverage statistics', () => true); });
/******************************************************************************** * Copyright (C) 2017 Ericsson and others.
functions.go
package ast import ( "bytes" . "crypto" "crypto/rsa" "crypto/sha256" "crypto/x509" "encoding/base64" "encoding/binary" "encoding/hex" "fmt" "io" "sort" "strconv" "strings" "unicode/utf8" "github.com/mr-tron/base58/base58" "github.com/pkg/errors" "github.com/wavesplatform/gowaves/pkg/crypto" "github.com/wavesplatform/gowaves/pkg/proto" ) const ( MaxBytesResult = 65536 MaxStringResult = 32767 MaxBytesToVerify = 32 * 1024 DefaultThrowMessage = "Explicit script termination" MaxListSize = 1000 ) type Throw struct { Message string } func (a Throw) Error() string { return a.Message } func Params(params ...Expr) Exprs { return NewExprs(params...) } func DataEntries(params ...*DataEntryExpr) []*DataEntryExpr { return params } func NativeGtLong(s Scope, e Exprs) (Expr, error) { return mathLong("NativeGtLong", func(i int64, i2 int64) (Expr, error) { return NewBoolean(i > i2), nil }, s, e) } func NativeGeLong(s Scope, e Exprs) (Expr, error) { return mathLong("NativeGeLong", func(i int64, i2 int64) (Expr, error) { return NewBoolean(i >= i2), nil }, s, e) } // Equality func NativeEq(s Scope, e Exprs) (Expr, error) { if l := len(e); l != 2 { return nil, errors.Errorf("NativeEq: invalid params, expected 2, passed %d", l) } first, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, "NativeEq evaluate first param") } second, err := e[1].Evaluate(s) if err != nil { return nil, errors.Wrap(err, "NativeEq evaluate second param") } b := first.Eq(second) return NewBoolean(b), err } // Get list element by position func NativeGetList(s Scope, e Exprs) (Expr, error) { const funcName = "NativeGetList" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid params, expected 2, passed %d", funcName, l) } first, err := e[0].Evaluate(s) if err != nil { return nil, err } second, err := e[1].Evaluate(s) if err != nil { return nil, err } lst, ok := first.(Exprs) if !ok { return nil, errors.Errorf("%s: expected first argument Exprs, got %T", funcName, first) } lng, ok := second.(*LongExpr) if !ok { return nil, errors.Errorf("%s: expected second argument *LongExpr, got %T", funcName, second) } if lng.Value < 0 || lng.Value >= int64(len(lst)) { return nil, errors.Errorf("%s: invalid index %d, len %d", funcName, lng.Value, len(lst)) } return lst[lng.Value], nil } func NativeCreateList(s Scope, e Exprs) (Expr, error) { const funcName = "NativeCreateList" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid parameters, expected 2, received %d", funcName, l) } head, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } t, err := e[1].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } tail, ok := t.(Exprs) if !ok { return nil, errors.Errorf("%s: invalid second parameter, expected Exprs, received %T", funcName, e[1]) } if len(tail) == 0 { return NewExprs(head), nil } return append(NewExprs(head), tail...), nil } func LimitedCreateList(s Scope, e Exprs) (Expr, error) { const funcName = "LimitedCreateList" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid parameters, expected 2, received %d", funcName, l) } head, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } t, err := e[1].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } tail, ok := t.(Exprs) if !ok { return nil, errors.Errorf("%s: invalid second parameter, expected Exprs, received %T", funcName, e[1]) } if len(tail) == MaxListSize { return nil, errors.Errorf("%s: list size can not exceed %d elements", funcName, MaxListSize) } if 
len(tail) == 0 { return NewExprs(head), nil } return append(NewExprs(head), tail...), nil } func AppendToList(s Scope, e Exprs) (Expr, error) { const funcName = "AppendToList" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid parameters, expected 2, received %d", funcName, l) } l, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } list, ok := l.(Exprs) if !ok { return nil, errors.Errorf("%s: invalid first parameter, expected Exprs, received %T", funcName, e[0]) } if len(list) == MaxListSize { return nil, errors.Errorf("%s: list size can not exceed %d elements", funcName, MaxListSize) } element, err := e[1].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } if len(list) == 0 { return NewExprs(element), nil } return append(list, element), nil } func Concat(s Scope, e Exprs) (Expr, error) { const funcName = "Concat" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid parameters, expected 2, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } list1, ok := rs[0].(Exprs) if !ok { return nil, errors.Errorf("%s: invalid first parameter, expected Exprs, received %T", funcName, rs[0]) } list2, ok := rs[1].(Exprs) if !ok { return nil, errors.Errorf("%s: invalid second parameter, expected Exprs, received %T", funcName, rs[1]) } if len(list1)+len(list2) > MaxListSize { return nil, errors.Errorf("%s: list size can not exceed %d elements", funcName, MaxListSize) } if len(list1) == 0 { list1 = NewExprs() } return append(list1, list2...), nil } func Median(s Scope, e Exprs) (Expr, error) { const funcName = "Median" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid parameters, expected 1, received %d", funcName, l) } l, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } list, ok := l.(Exprs) if !ok { return nil, errors.Errorf("%s: invalid first parameter, expected Exprs, received %T", funcName, e[0]) } size := len(list) if size > MaxListSize || size < 2 { return nil, errors.Errorf("%s: invalid list size %d", funcName, size) } items := make([]int, size) for i, el := range list { item, ok := el.(*LongExpr) if !ok { return nil, errors.Errorf("%s: list must contain only LongExpr elements", funcName) } items[i] = int(item.Value) } sort.Ints(items) half := size / 2 if size%2 == 1 { return NewLong(int64(items[half])), nil } else { return NewLong(floorDiv(int64(items[half-1])+int64(items[half]), 2)), nil } } // Internal function to check value type func NativeIsInstanceOf(s Scope, e Exprs) (Expr, error) { const funcName = "NativeIsInstanceOf" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid params, expected 2, passed %d", funcName, l) } first, err := e[0].Evaluate(s) if err != nil { return nil, err } second, err := e[1].Evaluate(s) if err != nil { return nil, err } str, ok := second.(*StringExpr) if !ok { return nil, errors.Errorf("%s: expected second argument to be *StringExpr, got %T", funcName, second) } strVal := first.InstanceOf() return NewBoolean(strVal == str.Value), nil } // Integer sum func NativeSumLong(s Scope, e Exprs) (Expr, error) { return mathLong("NativeSumLong", func(i int64, i2 int64) (Expr, error) { return NewLong(i + i2), nil }, s, e) } // Integer substitution func NativeSubLong(s Scope, e Exprs) (Expr, error) { return mathLong("NativeSubLong", func(i int64, i2 int64) (Expr, error) { return NewLong(i - i2), nil }, s, e) } // Integer multiplication func NativeMulLong(s Scope, e Exprs) (Expr, error) { 
return mathLong("NativeMulLong", func(i int64, i2 int64) (Expr, error) { return NewLong(i * i2), nil }, s, e) } // Integer division func NativeDivLong(s Scope, e Exprs) (Expr, error) { return mathLong("NativeDivLong", func(x int64, y int64) (Expr, error) { if y == 0 { return nil, errors.New("zero division") } return NewLong(floorDiv(x, y)), nil }, s, e) } // Modulo func NativeModLong(s Scope, e Exprs) (Expr, error) { return mathLong("NativeModLong", func(i int64, i2 int64) (Expr, error) { if i2 == 0 { return nil, errors.New("zero division") } return NewLong(modDivision(i, i2)), nil }, s, e) } // Multiply and division with big integer intermediate representation func NativeFractionLong(s Scope, e Exprs) (Expr, error) { const funcName = "NativeFractionLong" if l := len(e); l != 3 { return nil, errors.Errorf("%s: invalid params, expected 3, passed %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } value, ok := rs[0].(*LongExpr) if !ok { return nil, errors.Errorf("%s first argument expected to be *LongExpr, got %T", funcName, rs[0]) } numerator, ok := rs[1].(*LongExpr) if !ok { return nil, errors.Errorf("%s second argument expected to be *LongExpr, got %T", funcName, rs[1]) } denominator, ok := rs[2].(*LongExpr) if !ok { return nil, errors.Errorf("%s third argument expected to be *LongExpr, got %T", funcName, rs[2]) } res, err := fraction(value.Value, numerator.Value, denominator.Value) if err != nil { return nil, errors.Wrap(err, funcName) } return NewLong(res), nil } // NativePowLong calculates power. func NativePowLong(s Scope, e Exprs) (Expr, error) { const funcName = "NativePowLong" if l := len(e); l != 6 { return nil, errors.Errorf("%s: invalid number of parameters, expected 6, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } base, ok := rs[0].(*LongExpr) if !ok { return nil, errors.Errorf("%s first argument expected to be *LongExpr, got %T", funcName, rs[0]) } bp, ok := rs[1].(*LongExpr) if !ok { return nil, errors.Errorf("%s second argument expected to be *LongExpr, got %T", funcName, rs[1]) } exponent, ok := rs[2].(*LongExpr) if !ok { return nil, errors.Errorf("%s third argument expected to be *LongExpr, got %T", funcName, rs[2]) } ep, ok := rs[3].(*LongExpr) if !ok { return nil, errors.Errorf("%s 4th argument expected to be *LongExpr, got %T", funcName, rs[3]) } rp, ok := rs[4].(*LongExpr) if !ok { return nil, errors.Errorf("%s 5th argument expected to be *LongExpr, got %T", funcName, rs[4]) } round, err := roundingMode(rs[5]) if err != nil { return nil, errors.Wrap(err, funcName) } r, err := pow(base.Value, exponent.Value, int(bp.Value), int(ep.Value), int(rp.Value), round) if err != nil { return nil, errors.Wrap(err, funcName) } return NewLong(r), nil } // NativeLogLong calculates logarithm.
func NativeLogLong(s Scope, e Exprs) (Expr, error) { const funcName = "NativeLogLong" if l := len(e); l != 6 { return nil, errors.Errorf("%s: invalid number of parameters, expected 6, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } base, ok := rs[0].(*LongExpr) if !ok { return nil, errors.Errorf("%s first argument expected to be *LongExpr, got %T", funcName, rs[0]) } bp, ok := rs[1].(*LongExpr) if !ok { return nil, errors.Errorf("%s second argument expected to be *LongExpr, got %T", funcName, rs[1]) } exponent, ok := rs[2].(*LongExpr) if !ok { return nil, errors.Errorf("%s third argument expected to be *LongExpr, got %T", funcName, rs[2]) } ep, ok := rs[3].(*LongExpr) if !ok { return nil, errors.Errorf("%s 4th argument expected to be *LongExpr, got %T", funcName, rs[3]) } rp, ok := rs[4].(*LongExpr) if !ok { return nil, errors.Errorf("%s 5th argument expected to be *LongExpr, got %T", funcName, rs[4]) } round, err := roundingMode(rs[5]) if err != nil { return nil, errors.Wrap(err, funcName) } r, err := log(base.Value, exponent.Value, int(bp.Value), int(ep.Value), int(rp.Value), round) if err != nil { return nil, errors.Wrap(err, funcName) } return NewLong(r), nil } func limitedSigVerify(limit int) Callable { fn := "SigVerify" if limit > 0 { fn = fmt.Sprintf("%s_%dKb", fn, limit) } return func(s Scope, e Exprs) (Expr, error) { if l := len(e); l != 3 { return nil, errors.Errorf("%s: invalid number of parameters %d, expected 3", fn, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, fn) } messageExpr, ok := rs[0].(*BytesExpr) if !ok { return nil, errors.Errorf("%s: first argument expects to be *BytesExpr, found %T", fn, rs[0]) } if l := len(messageExpr.Value); !s.validMessageLength(l) || limit > 0 && l > limit*1024 { return nil, errors.Errorf("%s: invalid message size %d", fn, l) } signatureExpr, ok := rs[1].(*BytesExpr) if !ok { return nil, errors.Errorf("%s: second argument expects to be *BytesExpr, found %T", fn, rs[1]) } pkExpr, ok := rs[2].(*BytesExpr) if !ok { return nil, errors.Errorf("%s: third argument expects to be *BytesExpr, found %T", fn, rs[2]) } pk, err := crypto.NewPublicKeyFromBytes(pkExpr.Value) if err != nil { return NewBoolean(false), nil } signature, err := crypto.NewSignatureFromBytes(signatureExpr.Value) if err != nil { return NewBoolean(false), nil } out := crypto.Verify(pk, signature, messageExpr.Value) return NewBoolean(out), nil } } func limitedKeccak256(limit int) Callable { fn := "Keccak256" if limit > 0 { fn = fmt.Sprintf("%s_%dKb", fn, limit) } return func(s Scope, e Exprs) (Expr, error) { if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid number of parameters %d, expected 1", fn, l) } val, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrapf(err, fn) } dataExpr, ok := val.(*BytesExpr) if !ok { return nil, errors.Errorf("%s: expected first argument to be *BytesExpr, found %T", fn, val) } if l := len(dataExpr.Value); limit > 0 && l > limit { return nil, errors.Errorf("%s: invalid size of data %d bytes", fn, l) } d, err := crypto.Keccak256(dataExpr.Value) if err != nil { return nil, errors.Wrap(err, fn) } return NewBytes(d.Bytes()), nil } } func limitedBlake2b256(limit int) Callable { fn := "Blake2b256" if limit > 0 { fn = fmt.Sprintf("%s_%dKb", fn, limit) } return func(s Scope, e Exprs) (Expr, error) { if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid number of parameters %d, expected 1", fn, l) } val, err := e[0].Evaluate(s) if err 
!= nil { return nil, errors.Wrapf(err, fn) } dataExpr, ok := val.(*BytesExpr) if !ok { return nil, errors.Errorf("%s: expected first argument to be *BytesExpr, found %T", fn, val) } if l := len(dataExpr.Value); limit > 0 && l > limit*1024 { return nil, errors.Errorf("%s: invalid data size %d bytes", fn, l) } d, err := crypto.FastHash(dataExpr.Value) if err != nil { return nil, errors.Wrap(err, fn) } return NewBytes(d.Bytes()), nil } } // 256 bit SHA-2 func limitedSha256(limit int) Callable { fn := "Sha256" if limit > 0 { fn = fmt.Sprintf("%s_%dKb", fn, limit) } return func(s Scope, e Exprs) (Expr, error) { if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid number of parameters %d, expected 1", fn, l) } val, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrapf(err, fn) } var bytes []byte switch s := val.(type) { case *BytesExpr: bytes = s.Value case *StringExpr: bytes = []byte(s.Value) default: return nil, errors.Errorf("%s: expected first argument to be *BytesExpr or *StringExpr, found %T", fn, val) } if l := len(bytes); limit > 0 && l > limit*1024 { return nil, errors.Errorf("%s: invalid data size %d bytes", fn, l) } h := sha256.New() if _, err = h.Write(bytes); err != nil { return nil, errors.Wrap(err, fn) } d := h.Sum(nil) return NewBytes(d), nil } } // Height when transaction was stored to blockchain func NativeTransactionHeightByID(s Scope, e Exprs) (Expr, error) { const funcName = "NativeTransactionHeightByID" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } rs, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } bts, ok := rs.(*BytesExpr) if !ok { return nil, errors.Errorf("%s: expected first argument to be *BytesExpr, got %T", funcName, rs) } height, err := s.State().NewestTransactionHeightByID(bts.Value) if err != nil { if s.State().IsNotFound(err) { return &Unit{}, nil } return nil, errors.Wrap(err, funcName) } return NewLong(int64(height)), nil } // Lookup transaction func NativeTransactionByID(s Scope, e Exprs) (Expr, error) { const funcName = "NativeTransactionByID" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } rs, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } bts, ok := rs.(*BytesExpr) if !ok { return nil, errors.Errorf("%s: expected first argument to be *BytesExpr, got %T", funcName, rs) } tx, err := s.State().NewestTransactionByID(bts.Value) if err != nil { if s.State().IsNotFound(err) { return NewUnit(), nil } return nil, errors.Wrap(err, funcName) } vars, err := NewVariablesFromTransaction(s.Scheme(), tx) if err != nil { return nil, errors.Wrap(err, funcName) } return NewObject(vars), nil } //1006: returns Union[TransferTransaction, Unit] func NativeTransferTransactionByID(s Scope, e Exprs) (Expr, error) { const funcName = "NativeTransferTransactionByID" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } rs, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } bts, ok := rs.(*BytesExpr) if !ok { return nil, errors.Errorf("%s: expected first argument to be *BytesExpr, got %T", funcName, rs) } tx, err := s.State().NewestTransactionByID(bts.Value) if err != nil { if s.State().IsNotFound(err) { return NewUnit(), nil } return nil, errors.Wrap(err, funcName) } switch t := tx.(type) { case *proto.TransferWithProofs: rs, err := newVariablesFromTransferWithProofs(s.Scheme(), t) if 
err != nil { return nil, errors.Wrap(err, funcName) } return NewObject(rs), nil case *proto.TransferWithSig: rs, err := newVariablesFromTransferWithSig(s.Scheme(), t) if err != nil { return nil, errors.Wrap(err, funcName) } return NewObject(rs), nil default: return NewUnit(), nil } } // Size of bytes vector func NativeSizeBytes(s Scope, e Exprs) (Expr, error) { const funcName = "NativeSizeBytes" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } rs, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } bts, ok := rs.(*BytesExpr) if !ok { return nil, errors.Errorf("%s expected first argument to be *BytesExpr, found %T", funcName, rs) } return NewLong(int64(len(bts.Value))), nil } // Take firsts bytes func NativeTakeBytes(s Scope, e Exprs) (Expr, error) { const funcName = "NativeTakeBytes" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid params, expected 2, passed %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } bts, ok := rs[0].(*BytesExpr) if !ok { return nil, errors.Errorf("%s expected first argument to be *BytesExpr, found %T", funcName, rs[0]) } length, ok := rs[1].(*LongExpr) if !ok { return nil, errors.Errorf("%s expected second argument to be *LongExpr, found %T", funcName, rs[1]) } l := int(length.Value) if l > len(bts.Value) { l = len(bts.Value) } if l < 0 { l = 0 } out := make([]byte, l) copy(out, bts.Value[:l]) return NewBytes(out), nil } // Skip firsts bytes func NativeDropBytes(s Scope, e Exprs) (Expr, error) { const funcName = "NativeDropBytes" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid params, expected 2, passed %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } bts, ok := rs[0].(*BytesExpr) if !ok { return nil, errors.Errorf("%s expected first argument to be *BytesExpr, found %T", funcName, rs[0]) } length, ok := rs[1].(*LongExpr) if !ok { return nil, errors.Errorf("%s expected second argument to be *LongExpr, found %T", funcName, rs[1]) } l := int(length.Value) if l > len(bts.Value) { l = len(bts.Value) } if l < 0 { l = 0 } out := make([]byte, len(bts.Value)-l) copy(out, bts.Value[l:]) return NewBytes(out), nil } // Limited bytes concatenation func NativeConcatBytes(s Scope, e Exprs) (Expr, error) { const funcName = "NativeConcatBytes" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid params, expected 2, passed %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } prefix, ok := rs[0].(*BytesExpr) if !ok { return nil, errors.Errorf("%s expected first argument to be *BytesExpr, found %T", funcName, rs[0]) } suffix, ok := rs[1].(*BytesExpr) if !ok { return nil, errors.Errorf("%s expected second argument to be *BytesExpr, found %T", funcName, rs[1]) } l := len(prefix.Value) + len(suffix.Value) if l > MaxBytesResult { return nil, errors.Errorf("%s byte length %d is greater than max %d", funcName, l, MaxBytesResult) } out := make([]byte, l) out = append(out[:0], prefix.Value...) out = append(out[:len(prefix.Value)], suffix.Value...) 
return NewBytes(out), nil } // Limited strings concatenation func NativeConcatStrings(s Scope, e Exprs) (Expr, error) { const funcName = "NativeConcatStrings" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid params, expected 2, passed %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } prefix, ok := rs[0].(*StringExpr) if !ok { return nil, errors.Errorf("%s expected first argument to be *StringExpr, found %T", funcName, rs[0]) } suffix, ok := rs[1].(*StringExpr) if !ok { return nil, errors.Errorf("%s expected second argument to be *StringExpr, found %T", funcName, rs[1]) } l := len(prefix.Value) + len(suffix.Value) if l > MaxBytesResult { return nil, errors.Errorf("%s byte length %d is greater than max %d", funcName, l, MaxBytesResult) } out := prefix.Value + suffix.Value lengthInRunes := utf8.RuneCountInString(out) if lengthInRunes > MaxStringResult { return nil, errors.Errorf("%s string length %d is greater than max %d", funcName, lengthInRunes, MaxStringResult) } return NewString(out), nil } // Take string prefix func NativeTakeStrings(s Scope, e Exprs) (Expr, error) { const funcName = "NativeTakeStrings" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid params, expected 2, passed %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } str, ok := rs[0].(*StringExpr) if !ok { return nil, errors.Errorf("%s expected first argument to be *StringExpr, found %T", funcName, rs[0]) } length, ok := rs[1].(*LongExpr) if !ok { return nil, errors.Errorf("%s expected second argument to be *LongExpr, found %T", funcName, rs[1]) } runeStr := []rune(str.Value) runeLen := len(runeStr) l := int(length.Value) if l > runeLen { l = runeLen } if l < 0 { l = 0 } out := make([]rune, l) copy(out, runeStr[:l]) return NewString(string(out)), nil } // Remove string prefix func NativeDropStrings(s Scope, e Exprs) (Expr, error) { const funcName = "NativeDropStrings" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid params, expected 2, passed %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } str, ok := rs[0].(*StringExpr) if !ok { return nil, errors.Errorf("%s expected first argument to be *StringExpr, found %T", funcName, rs[0]) } length, ok := rs[1].(*LongExpr) if !ok { return nil, errors.Errorf("%s expected second argument to be *LongExpr, found %T", funcName, rs[1]) } runeStr := []rune(str.Value) runeLen := len(runeStr) l := int(length.Value) if l > runeLen { l = runeLen } if l < 0 { l = 0 } out := make([]rune, runeLen-l) copy(out, runeStr[l:]) return NewString(string(out)), nil } // String size in characters func NativeSizeString(s Scope, e Exprs) (Expr, error) { const funcName = "NativeSizeString" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } rs, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } str, ok := rs.(*StringExpr) if !ok { return nil, errors.Errorf("%s expected first argument to be *StringExpr, found %T", funcName, rs) } return NewLong(int64(utf8.RuneCountInString(str.Value))), nil } // Size of list func NativeSizeList(s Scope, e Exprs) (Expr, error) { const funcName = "NativeSizeList" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } // optimization: avoid evaluating the inner list if v, ok := e[0].(Exprs); ok { return NewLong(int64(len(v))), nil } rs, err :=
e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } lst, ok := rs.(Exprs) if !ok { return nil, errors.Errorf("%s: expected first argument Exprs, got %T", funcName, rs) } return NewLong(int64(len(lst))), nil } // Long to big endian bytes func NativeLongToBytes(s Scope, e Exprs) (Expr, error) { const funcName = "NativeLongToBytes" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } first, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } long, ok := first.(*LongExpr) if !ok { return nil, errors.Errorf("%s: expected first argument *LongExpr, got %T", funcName, first) } out := make([]byte, 8) binary.BigEndian.PutUint64(out, uint64(long.Value)) return NewBytes(out), nil } // String to bytes representation func NativeStringToBytes(s Scope, e Exprs) (Expr, error) { const funcName = "NativeStringToBytes" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } first, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } str, ok := first.(*StringExpr) if !ok { return nil, errors.Errorf("%s: expected first argument *StringExpr, got %T", funcName, first) } return NewBytes([]byte(str.Value)), nil } // Boolean to bytes representation (1 - true, 0 - false) func NativeBooleanToBytes(s Scope, e Exprs) (Expr, error) { const funcName = "NativeBooleanToBytes" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } rs, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } b, ok := rs.(*BooleanExpr) if !ok { return nil, errors.Errorf("%s: expected first argument to be *BooleanExpr, got %T", funcName, rs) } if b.Value { return NewBytes([]byte{1}), nil } else { return NewBytes([]byte{0}), nil } } // Asset balance for account func NativeAssetBalance(s Scope, e Exprs) (Expr, error) { const funcName = "NativeAssetBalance" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid params, expected 2, passed %d", funcName, l) } addressOrAliasExpr, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } assetId, err := e[1].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } var r proto.Recipient switch a := addressOrAliasExpr.(type) { case *AddressExpr: r = proto.NewRecipientFromAddress(proto.Address(*a)) case *AliasExpr: r = proto.NewRecipientFromAlias(proto.Alias(*a)) case *RecipientExpr: r = proto.Recipient(*a) default: return nil, errors.Errorf("%s first argument expected to be AddressExpr or AliasExpr, found %T", funcName, addressOrAliasExpr) } if _, ok := assetId.(*Unit); ok { balance, err := s.State().NewestAccountBalance(r, nil) if err != nil { return nil, errors.Wrap(err, funcName) } return NewLong(int64(balance)), nil } assetBts, ok := assetId.(*BytesExpr) if !ok { return nil, errors.Errorf("%s expected second argument to be *BytesExpr, found %T", funcName, assetId) } balance, err := s.State().NewestAccountBalance(r, assetBts.Value) if err != nil { return nil, errors.Wrap(err, funcName) } return NewLong(int64(balance)), nil } // Fail script func NativeThrow(s Scope, e Exprs) (Expr, error) { const funcName = "NativeThrow" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } first, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } str, ok := first.(*StringExpr) if !ok { return nil, errors.Errorf("%s: 
expected first argument to be *StringExpr, found %T", funcName, first) } return nil, Throw{ Message: str.Value, } } // String representation func NativeLongToString(s Scope, e Exprs) (Expr, error) { const funcName = "NativeLongToString" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } first, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } long, ok := first.(*LongExpr) if !ok { return nil, errors.Errorf("%s: expected first argument to be *LongExpr, found %T", funcName, first) } return NewString(fmt.Sprintf("%d", long.Value)), nil } // String representation func NativeBooleanToString(s Scope, e Exprs) (Expr, error) { const funcName = "NativeBooleanToString" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } first, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } b, ok := first.(*BooleanExpr) if !ok { return nil, errors.Errorf("%s: expected first argument to be *BooleanExpr, found %T", funcName, first) } if b.Value { return NewString("true"), nil } else { return NewString("false"), nil } } // Base58 encode func NativeToBase58(s Scope, e Exprs) (Expr, error) { const funcName = "NativeToBase58" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } first, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } switch arg := first.(type) { case *BytesExpr: return NewString(base58.Encode(arg.Value)), nil case *Unit: return NewString(base58.Encode(nil)), nil default: return nil, errors.Errorf("%s: expected first argument to be *BytesExpr, found %T", funcName, first) } } // Base58 decode func NativeFromBase58(s Scope, e Exprs) (Expr, error) { const funcName = "NativeFromBase58" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } first, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } str, ok := first.(*StringExpr) if !ok { return nil, errors.Errorf("%s: expected first argument to be *StringExpr, found %T", funcName, first) } if str.Value == "" { return NewBytes(nil), nil } rs, err := base58.Decode(str.Value) if err != nil { return nil, errors.Wrap(err, funcName) } return NewBytes(rs), nil } // Base64 decode func NativeFromBase64(s Scope, e Exprs) (Expr, error) { const prefix = "base64:" const funcName = "NativeFromBase64" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } first, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } str, ok := first.(*StringExpr) if !ok { return nil, errors.Errorf("%s expected first argument to be *StringExpr, found %T", funcName, first) } ev := strings.TrimPrefix(str.Value, prefix) decoded, err := base64.StdEncoding.DecodeString(ev) if err != nil { // Try no padding. 
decoded, err = base64.RawStdEncoding.DecodeString(ev) if err != nil { return nil, errors.Wrap(err, funcName) } return NewBytes(decoded), nil } return NewBytes(decoded), nil } // Base64 encode func NativeToBase64(s Scope, e Exprs) (Expr, error) { const funcName = "NativeToBase64" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } first, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } b, ok := first.(*BytesExpr) if !ok { return nil, errors.Errorf("%s expected first argument to be *BytesExpr, found %T", funcName, first) } encoded := base64.StdEncoding.EncodeToString(b.Value) return NewString(encoded), nil } // Base16 (Hex) decode func NativeFromBase16(s Scope, e Exprs) (Expr, error) { const funcName = "NativeFromBase16" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } first, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } str, ok := first.(*StringExpr) if !ok { return nil, errors.Errorf("%s expected first argument to be *StringExpr, found %T", funcName, first) } decoded, err := hex.DecodeString(str.Value) if err != nil { return nil, errors.Wrap(err, funcName) } return NewBytes(decoded), nil } // Base16 (Hex) encode func NativeToBase16(s Scope, e Exprs) (Expr, error) { const funcName = "NativeToBase16" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } first, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } b, ok := first.(*BytesExpr) if !ok { return nil, errors.Errorf("%s expected first argument to be *BytesExpr, found %T", funcName, first) } encoded := hex.EncodeToString(b.Value) return NewString(encoded), nil } // Get integer from data of DataTransaction func NativeDataIntegerFromArray(s Scope, e Exprs) (Expr, error) { d, err := dataFromArray(s, e) if err != nil { return nil, errors.Wrap(err, "NativeDataIntegerFromArray") } _, ok := d.(*LongExpr) if !ok { return NewUnit(), nil } return d, nil } // Get boolean from data of DataTransaction func NativeDataBooleanFromArray(s Scope, e Exprs) (Expr, error) { d, err := dataFromArray(s, e) if err != nil { return nil, errors.Wrap(err, "NativeDataBooleanFromArray") } _, ok := d.(*BooleanExpr) if !ok { return NewUnit(), nil } return d, nil } // Get bytes from data of DataTransaction func NativeDataBinaryFromArray(s Scope, e Exprs) (Expr, error) { d, err := dataFromArray(s, e) if err != nil { return nil, errors.Wrap(err, "NativeDataBinaryFromArray") } _, ok := d.(*BytesExpr) if !ok { return NewUnit(), nil } return d, nil } // Get string from data of DataTransaction func NativeDataStringFromArray(s Scope, e Exprs) (Expr, error) { d, err := dataFromArray(s, e) if err != nil { return nil, errors.Wrap(err, "NativeDataStringFromArray") } _, ok := d.(*StringExpr) if !ok { return NewUnit(), nil } return d, nil } // Get integer from account state func NativeDataIntegerFromState(s Scope, e Exprs) (Expr, error) { r, k, err := extractRecipientAndKey(s, e) if err != nil { return NewUnit(), nil } entry, err := s.State().RetrieveNewestIntegerEntry(r, k) if err != nil { return NewUnit(), nil } return NewLong(entry.Value), nil } // Get bool from account state func NativeDataBooleanFromState(s Scope, e Exprs) (Expr, error) { r, k, err := extractRecipientAndKey(s, e) if err != nil { return NewUnit(), nil } entry, err := s.State().RetrieveNewestBooleanEntry(r, k) if err != nil { return NewUnit(), 
nil } return NewBoolean(entry.Value), nil } // Get bytes from account state func NativeDataBinaryFromState(s Scope, e Exprs) (Expr, error) { r, k, err := extractRecipientAndKey(s, e) if err != nil { return NewUnit(), nil } entry, err := s.State().RetrieveNewestBinaryEntry(r, k) if err != nil { return NewUnit(), nil } return NewBytes(entry.Value), nil } // Get string from account state func NativeDataStringFromState(s Scope, e Exprs) (Expr, error) { r, k, err := extractRecipientAndKey(s, e) if err != nil { return NewUnit(), nil } entry, err := s.State().RetrieveNewestStringEntry(r, k) if err != nil { return NewUnit(), nil } return NewString(entry.Value), nil } func NativeAddressFromRecipient(s Scope, e Exprs) (Expr, error) { const funcName = "NativeAddressFromRecipient" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } first, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } recipient, ok := first.(*RecipientExpr) if !ok { return nil, errors.Errorf("%s expected first argument to be RecipientExpr, found %T", funcName, first) } if recipient.Address != nil { return NewAddressFromProtoAddress(*recipient.Address), nil } if recipient.Alias != nil { addr, err := s.State().NewestAddrByAlias(*recipient.Alias) if err != nil { return nil, errors.Wrap(err, funcName) } return NewAddressFromProtoAddress(addr), nil } return nil, errors.Errorf("can't get address from recipient, recipient %v", recipient) } // 1004: accepts id: []byte func NativeAssetInfo(s Scope, e Exprs) (Expr, error) { const funcName = "NativeAssetInfo" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } first, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } id, ok := first.(*BytesExpr) if !ok { return nil, errors.Errorf("%s expected first argument to be *BytesExpr, found %T", funcName, first) } assetId, err := crypto.NewDigestFromBytes(id.Value) if err != nil { return NewUnit(), nil // Return Unit not an error on invalid Asset IDs } info, err := s.State().NewestAssetInfo(assetId) if err != nil { return nil, errors.Wrap(err, funcName) } return NewAssetInfo(newMapAssetInfo(*info)), nil } //1005: func NativeBlockInfoByHeight(s Scope, e Exprs) (Expr, error) { const funcName = "NativeBlockInfoByHeight" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } first, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } height, ok := first.(*LongExpr) if !ok { return nil, errors.Errorf("%s expected first argument to be *LongExpr, found %T", funcName, first) } h, err := s.State().NewestHeaderByHeight(proto.Height(height.Value)) if err != nil { return nil, errors.Wrap(err, funcName) } bi, err := NewBlockInfo(s.Scheme(), h, proto.Height(height.Value)) if err != nil { return nil, errors.Wrap(err, funcName) } return bi, nil } // Fail script without message (default will be used) func UserThrow(_ Scope, _ Exprs) (Expr, error) { return nil, Throw{Message: DefaultThrowMessage} } // Decode account address func UserAddressFromString(s Scope, e Exprs) (Expr, error) { if l := len(e); l != 1 { return nil, errors.Errorf("UserAddressFromString: invalid params, expected 1, passed %d", l) } rs, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, "UserAddressFromString") } str, ok := rs.(*StringExpr) if !ok { return nil, errors.Errorf("UserAddressFromString: expected first argument to 
be *StringExpr, found %T", rs) } addr, err := NewAddressFromString(str.Value) if err != nil { return NewUnit(), nil } if addr[1] != s.Scheme() { return NewUnit(), nil } return addr, nil } func NativeAddressToString(s Scope, e Exprs) (Expr, error) { const funcName = "NativeAddressToString" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid number of parameters, expected 1, received %d", funcName, l) } rs, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } addr, ok := rs.(*AddressExpr) if !ok { return nil, errors.Errorf("%s: first argument expected to be *AddressExpr, found %T", funcName, rs) } str := proto.Address(*addr).String() return NewString(str), nil } // != func UserFunctionNeq(s Scope, e Exprs) (Expr, error) { const funcName = "UserFunctionNeq" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid params, expected 2, passed %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } eq := rs[0].Eq(rs[1]) return NewBoolean(!eq), nil } func UserIsDefined(s Scope, e Exprs) (Expr, error) { const funcName = "UserIsDefined" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } val, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } if val.InstanceOf() == (&Unit{}).InstanceOf() { return NewBoolean(false), nil } return NewBoolean(true), nil } func UserExtract(s Scope, e Exprs) (Expr, error) { const funcName = "UserExtract" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } val, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } if val.InstanceOf() == (&Unit{}).InstanceOf() { return NativeThrow(s, Params(NewString("extract() called on unit value"))) } return val, nil } func UserDropRightBytes(s Scope, e Exprs) (Expr, error) { const funcName = "UserDropRightBytes" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid params, expected 2, passed %d", funcName, l) } length, err := NativeSizeBytes(s, Params(e[0])) if err != nil { return nil, err } takeLeft, err := NativeSubLong(s, Params(length, e[1])) if err != nil { return nil, err } return NativeTakeBytes(s, Params(e[0], takeLeft)) } func UserTakeRightBytes(s Scope, e Exprs) (Expr, error) { const funcName = "UserTakeRightBytes" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid params, expected 2, passed %d", funcName, l) } length, err := NativeSizeBytes(s, Params(e[0])) if err != nil { return nil, err } takeLeft, err := NativeSubLong(s, Params(length, e[1])) if err != nil { return nil, err } return NativeDropBytes(s, Params(e[0], takeLeft)) } func UserTakeRightString(s Scope, e Exprs) (Expr, error) { const funcName = "UserTakeRightString" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid params, expected 2, passed %d", funcName, l) } length, err := NativeSizeString(s, Params(e[0])) if err != nil { return nil, err } takeLeft, err := NativeSubLong(s, Params(length, e[1])) if err != nil { return nil, err } return NativeDropStrings(s, Params(e[0], takeLeft)) } func UserDropRightString(s Scope, e Exprs) (Expr, error) { const funcName = "UserDropRightString" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid params, expected 2, passed %d", funcName, l) } length, err := NativeSizeString(s, Params(e[0])) if err != nil { return nil, err } takeLeft, err := NativeSubLong(s, Params(length, e[1])) if err != nil { return nil, err } 
return NativeTakeStrings(s, Params(e[0], takeLeft)) } func UserUnaryMinus(s Scope, e Exprs) (Expr, error) { return NativeSubLong(s, append(Exprs{NewLong(0)}, e...)) } func UserUnaryNot(s Scope, e Exprs) (Expr, error) { const funcName = "UserUnaryNot" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } first, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } boolExpr, ok := first.(*BooleanExpr) if !ok { return nil, errors.Errorf("%s expected first argument to be *BooleanExpr, found %T", funcName, first) } return NewBoolean(!boolExpr.Value), nil } func UserDataIntegerFromArrayByIndex(s Scope, e Exprs) (Expr, error) { d, err := dataFromArrayByIndex(s, e) if err != nil { return nil, errors.Wrap(err, "UserDataIntegerFromArrayByIndex") } _, ok := d.(*LongExpr) if !ok { return NewUnit(), nil } return d, nil } func UserDataBooleanFromArrayByIndex(s Scope, e Exprs) (Expr, error) { d, err := dataFromArrayByIndex(s, e) if err != nil { return nil, errors.Wrap(err, "UserDataBooleanFromArrayByIndex") } _, ok := d.(*BooleanExpr) if !ok { return NewUnit(), nil } if d.InstanceOf() == "DataEntry" { val, err := d.(*DataEntryExpr).Get("value") if err != nil { return nil, errors.Wrap(err, "UserDataBooleanFromArrayByIndex") } _, ok := val.(*BooleanExpr) if !ok { return NewUnit(), nil } } return d, nil } func UserDataBinaryFromArrayByIndex(s Scope, e Exprs) (Expr, error) { d, err := dataFromArrayByIndex(s, e) if err != nil { return nil, errors.Wrap(err, "UserDataBinaryFromArrayByIndex") } _, ok := d.(*BytesExpr) if !ok { return NewUnit(), nil } return d, nil } func UserDataStringFromArrayByIndex(s Scope, e Exprs) (Expr, error) { d, err := dataFromArrayByIndex(s, e) if err != nil { return nil, errors.Wrap(err, "UserDataStringFromArrayByIndex") } _, ok := d.(*StringExpr) if !ok { return NewUnit(), nil } return d, nil } func UserAddressFromPublicKey(s Scope, e Exprs) (Expr, error) { const funcName = "UserAddressFromPublicKey" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } publicKeyExpr, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } bts, ok := publicKeyExpr.(*BytesExpr) if !ok { return nil, errors.Errorf("%s expected first argument to be *BytesExpr, found %T", funcName, publicKeyExpr) } addr, err := proto.NewAddressLikeFromAnyBytes(s.Scheme(), bts.Value) if err != nil { return NewUnit(), nil } return NewAddressFromProtoAddress(addr), nil } // type constructor func UserAddress(s Scope, e Exprs) (Expr, error) { const funcName = "UserAddress" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } first, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } bts, ok := first.(*BytesExpr) if !ok { return nil, errors.Errorf("%s: first argument expected to be *BytesExpr, found %T", funcName, first) } addr, err := proto.NewAddressFromBytes(bts.Value) if err != nil { return &InvalidAddressExpr{Value: bts.Value}, nil } return NewAddressFromProtoAddress(addr), nil } func UserAlias(s Scope, e Exprs) (Expr, error) { const funcName = "UserAlias" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } first, err := e[0].Evaluate(s) if err != nil { return nil, errors.Wrap(err, funcName) } str, ok := first.(*StringExpr) if !ok { return nil, errors.Errorf("%s: first argument expected to be *StringExpr, found %T",
funcName, first) } alias := proto.NewAlias(s.Scheme(), str.Value) return NewAliasFromProtoAlias(*alias), nil } func DataEntry(s Scope, e Exprs) (Expr, error) { const funcName = "DataEntry" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid params, expected 2, passed %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } key, ok := rs[0].(*StringExpr) if !ok { return nil, errors.Errorf("%s: first argument expected to be *StringExpr, found %T", funcName, rs[0]) } switch t := rs[1].(type) { case *LongExpr, *BooleanExpr, *BytesExpr, *StringExpr: return NewDataEntry(key.Value, t), nil default: return nil, errors.Errorf("%s: unsupported value type %T", funcName, t) } } func DeleteEntry(s Scope, e Exprs) (Expr, error) { const funcName = "DeleteEntry" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid params, expected 1, passed %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } key, ok := rs[0].(*StringExpr) if !ok { return nil, errors.Errorf("%s: first argument expected to be *StringExpr, found %T", funcName, rs[0]) } return NewDataEntryDeleteExpr(key.Value), nil } func DataTransaction(s Scope, e Exprs) (Expr, error) { const funcName = "DataTransaction" if l := len(e); l != 9 { return nil, errors.Errorf("%s: invalid params, expected 9, passed %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } out := make(map[string]Expr) entries, ok := rs[0].(Exprs) if !ok { return nil, errors.Errorf("%s: first argument expected to be List, found %T", funcName, rs[0]) } out["data"] = entries id, ok := rs[1].(*BytesExpr) if !ok { return nil, errors.Errorf("%s: second argument expected to be *BytesExpr, found %T", funcName, rs[1]) } out["id"] = id fee, ok := rs[2].(*LongExpr) if !ok { return nil, errors.Errorf("%s: third argument expected to be *LongExpr, found %T", funcName, rs[2]) } out["fee"] = fee timestamp, ok := rs[3].(*LongExpr) if !ok { return nil, errors.Errorf("%s: 4th argument expected to be *LongExpr, found %T", funcName, rs[3]) } out["timestamp"] = timestamp version, ok := rs[4].(*LongExpr) if !ok { return nil, errors.Errorf("%s: 5th argument expected to be *LongExpr, found %T", funcName, rs[4]) } out["version"] = version addr, ok := rs[5].(*AddressExpr) if !ok { return nil, errors.Errorf("%s: 6th argument expected to be *AddressExpr, found %T", funcName, rs[5]) } out["sender"] = addr pk, ok := rs[6].(*BytesExpr) if !ok { return nil, errors.Errorf("%s: 7th argument expected to be *BytesExpr, found %T", funcName, rs[6]) } out["senderPublicKey"] = pk body, ok := rs[7].(*BytesExpr) if !ok { return nil, errors.Errorf("%s: 8th argument expected to be *BytesExpr, found %T", funcName, rs[7]) } out["bodyBytes"] = body proofs, ok := rs[8].(Exprs) if !ok { return nil, errors.Errorf("%s: 9th argument expected to be List, found %T", funcName, rs[8]) } out["proofs"] = proofs out[InstanceFieldName] = NewString("DataTransaction") return NewObject(out), nil } func AssetPair(s Scope, e Exprs) (Expr, error) { const funcName = "AssetPair" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid params, expected 2, passed %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } return NewAssetPair(rs[0], rs[1]), nil } func SimpleTypeConstructorFactory(name string, expr Expr) Callable { return func(s Scope, e Exprs) (Expr, error) { if l := len(e); l != 0 { return nil, errors.Errorf("%s: 
no params expected, passed %d", name, l) } return expr, nil } } func UserWavesBalance(s Scope, e Exprs) (Expr, error) { return NativeAssetBalance(s, append(e, NewUnit())) } func limitedRSAVerify(limit int) Callable { fn := "RSAVerify" if limit > 0 { fn = fmt.Sprintf("%s_%dKb", fn, limit) } return func(s Scope, e Exprs) (Expr, error) { if l := len(e); l != 4 { return nil, errors.Errorf("%s: invalid number of parameters, expected 4, received %d", fn, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, fn) } digest, err := digest(rs[0]) if err != nil { return nil, errors.Wrapf(err, "%s: failed to get digest algorithm from first argument", fn) } message, ok := rs[1].(*BytesExpr) if !ok { return nil, errors.Errorf("%s: second argument expected to be *BytesExpr, found %T", fn, rs[1]) } sig, ok := rs[2].(*BytesExpr) if !ok { return nil, errors.Errorf("%s: third argument expected to be *BytesExpr, found %T", fn, rs[2]) } pk, ok := rs[3].(*BytesExpr) if !ok { return nil, errors.Errorf("%s: 4th argument expected to be *BytesExpr, found %T", fn, rs[3]) } if l := len(message.Value); l > MaxBytesToVerify || limit > 0 && l > limit*1024 { return nil, errors.Errorf("%s: invalid message size %d bytes", fn, l) } key, err := x509.ParsePKIXPublicKey(pk.Value) if err != nil { return nil, errors.Wrapf(err, "%s: invalid public key", fn) } k, ok := key.(*rsa.PublicKey) if !ok { return nil, errors.Errorf("%s: not an RSA key", fn) } d := message.Value if digest != 0 { h := digest.New() _, _ = h.Write(message.Value) d = h.Sum(nil) } ok, err = verifyPKCS1v15(k, digest, d, sig.Value) if err != nil { return nil, errors.Wrapf(err, "%s: failed to check RSA signature", fn) } return NewBoolean(ok), nil } } func NativeCheckMerkleProof(s Scope, e Exprs) (Expr, error) { const funcName = "NativeMerkleVerify" if l := len(e); l != 3 { return nil, errors.Errorf("%s: invalid number of parameters, expected 3, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } root, ok := rs[0].(*BytesExpr) if !ok { return nil, errors.Errorf("%s: first argument expected to be *BytesExpr, found %T", funcName, rs[0]) } proof, ok := rs[1].(*BytesExpr) if !ok { return nil, errors.Errorf("%s: second argument expected to be *BytesExpr, found %T", funcName, rs[1]) } leaf, ok := rs[2].(*BytesExpr) if !ok { return nil, errors.Errorf("%s: third argument expected to be *BytesExpr, found %T", funcName, rs[2]) } r, err := merkleRootHash(leaf.Value, proof.Value) if err != nil { return nil, errors.Wrap(err, funcName) } return NewBoolean(bytes.Equal(root.Value, r)), nil } func NativeBytesToUTF8String(s Scope, e Exprs) (Expr, error) { const funcName = "NativeBytesToUTF8String" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid number of parameters, expected 1, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } b, ok := rs[0].(*BytesExpr) if !ok { return nil, errors.Errorf("%s: first argument expected to be *BytesExpr, found %T", funcName, rs[0]) } return NewString(string(b.Value)), nil } func NativeBytesToLong(s Scope, e Exprs) (Expr, error) { const funcName = "NativeBytesToLong" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid number of parameters, expected 1, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } b, ok := rs[0].(*BytesExpr) if !ok { return nil, errors.Errorf("%s: first argument expected to be *BytesExpr, found 
%T", funcName, rs[0]) } if l := len(b.Value); l < 8 { return nil, errors.Errorf("%s: %d is not enough bytes to make Long value, required 8 bytes", funcName, l) } return NewLong(int64(binary.BigEndian.Uint64(b.Value))), nil } func NativeBytesToLongWithOffset(s Scope, e Exprs) (Expr, error) { const funcName = "NativeBytesToLongWithOffset" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid number of parameters, expected 2, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } b, ok := rs[0].(*BytesExpr) if !ok { return nil, errors.Errorf("%s: first argument expected to be *BytesExpr, found %T", funcName, rs[0]) } off, ok := rs[1].(*LongExpr) if !ok { return nil, errors.Errorf("%s: second argument expected to be *LongExpr, found %T", funcName, rs[1]) } offset := int(off.Value) if offset < 0 || offset > len(b.Value)-8 { return nil, errors.Errorf("%s: offset %d is out of bytes array bounds", funcName, offset) } return NewLong(int64(binary.BigEndian.Uint64(b.Value[offset:]))), nil } func NativeIndexOfSubstring(s Scope, e Exprs) (Expr, error) { const funcName = "NativeIndexOfSubstring" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid number of parameters, expected 2, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } str, ok := rs[0].(*StringExpr) if !ok { return nil, errors.Errorf("%s: first argument expected to be *StringExpr, found %T", funcName, rs[0]) } sub, ok := rs[1].(*StringExpr) if !ok { return nil, errors.Errorf("%s: second argument expected to be *StringExpr, found %T", funcName, rs[1]) } i := strings.Index(str.Value, sub.Value) if i == -1 { return NewUnit(), nil } return NewLong(int64(i)), nil } func NativeIndexOfSubstringWithOffset(s Scope, e Exprs) (Expr, error) { const funcName = "NativeIndexOfSubstringWithOffset" if l := len(e); l != 3 { return nil, errors.Errorf("%s: invalid number of parameters, expected 3, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } str, ok := rs[0].(*StringExpr) if !ok { return nil, errors.Errorf("%s: first argument expected to be *StringExpr, found %T", funcName, rs[0]) } sub, ok := rs[1].(*StringExpr) if !ok { return nil, errors.Errorf("%s: second argument expected to be *StringExpr, found %T", funcName, rs[1]) } off, ok := rs[2].(*LongExpr) if !ok { return nil, errors.Errorf("%s: third argument expected to be *LongExpr, found %T", funcName, rs[2]) } offset := int(off.Value) if offset < 0 || offset > len(str.Value) { return NewUnit(), nil } i := strings.Index(str.Value[offset:], sub.Value) if i == -1 { return NewUnit(), nil } return NewLong(int64(i + offset)), nil } func NativeSplitString(s Scope, e Exprs) (Expr, error) { const funcName = "NativeSplitString" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid number of parameters, expected 2, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } str, ok := rs[0].(*StringExpr) if !ok { return nil, errors.Errorf("%s: first argument expected to be *StringExpr, found %T", funcName, rs[0]) } sep, ok := rs[1].(*StringExpr) if !ok { return nil, errors.Errorf("%s: second argument expected to be *StringExpr, found %T", funcName, rs[1]) } r := NewExprs() for _, p := range strings.Split(str.Value, sep.Value) { r = append(r, NewString(p)) } return r, nil } func NativeParseInt(s Scope, e Exprs) (Expr, error) { const funcName = 
"NativeParseInt" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid number of parameters, expected 1, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } str, ok := rs[0].(*StringExpr) if !ok { return nil, errors.Errorf("%s: first argument expected to be *StringExpr, found %T", funcName, rs[0]) } i, err := strconv.ParseInt(str.Value, 10, 64) if err != nil { return NewUnit(), nil } return NewLong(i), nil } func NativeLastIndexOfSubstring(s Scope, e Exprs) (Expr, error) { const funcName = "NativeLastIndexOfSubstring" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid number of parameters, expected 2, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } str, ok := rs[0].(*StringExpr) if !ok { return nil, errors.Errorf("%s: first argument expected to be *StringExpr, found %T", funcName, rs[0]) } sub, ok := rs[1].(*StringExpr) if !ok { return nil, errors.Errorf("%s: second argument expected to be *StringExpr, found %T", funcName, rs[1]) } i := strings.LastIndex(str.Value, sub.Value) if i == -1 { return NewUnit(), nil } return NewLong(int64(i)), nil } func NativeLastIndexOfSubstringWithOffset(s Scope, e Exprs) (Expr, error) { const funcName = "NativeLastIndexOfSubstringWithOffset" if l := len(e); l != 3 { return nil, errors.Errorf("%s: invalid number of parameters, expected 3, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } str, ok := rs[0].(*StringExpr) if !ok { return nil, errors.Errorf("%s: first argument expected to be *StringExpr, found %T", funcName, rs[0]) } sub, ok := rs[1].(*StringExpr) if !ok { return nil, errors.Errorf("%s: second argument expected to be *StringExpr, found %T", funcName, rs[1]) } off, ok := rs[2].(*LongExpr) if !ok { return nil, errors.Errorf("%s: third argument expected to be *LongExpr, found %T", funcName, rs[2]) } offset := int(off.Value) if offset < 0 { return NewUnit(), nil } i := strings.LastIndex(str.Value, sub.Value) for i > offset { i = strings.LastIndex(str.Value[:i], sub.Value) } if i == -1 { return NewUnit(), nil } return NewLong(int64(i)), nil } func UserValue(s Scope, e Exprs) (Expr, error) { const funcName = "UserValue" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid number of parameters, expected 1, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrapf(err, funcName) } if _, ok := rs[0].(*Unit); ok { return nil, Throw{Message: DefaultThrowMessage} } return rs[0], nil } func UserValueOrErrorMessage(s Scope, e Exprs) (Expr, error) { const funcName = "UserValueOrErrorMessage" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid number of parameters, expected 2, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrapf(err, funcName) } msg, ok := rs[1].(*StringExpr) if !ok { return nil, errors.Errorf("%s: second argument expected to be *StringExpr, found %T", funcName, rs[1]) } if _, ok := rs[0].(*Unit); ok { return nil, Throw{Message: msg.Value} } return rs[0], nil } func UserWriteSet(s Scope, e Exprs) (Expr, error) { const funcName = "UserWriteSet" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid number of parameters, expected 1, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrapf(err, funcName) } listOfDataEntries, ok := rs[0].(Exprs) if !ok { return nil, errors.Errorf("%s: 
first argument expected to be Exprs, found %T", funcName, rs[0]) } var dataEntries []*DataEntryExpr for _, expr := range listOfDataEntries { if expr.InstanceOf() != "DataEntry" { return nil, errors.Errorf("Expected instance of DataEntry, found %s, %T", expr.InstanceOf(), expr) } dataEntries = append(dataEntries, expr.(*DataEntryExpr)) } return NewWriteSet(dataEntries...), nil } func UserTransferSet(s Scope, e Exprs) (Expr, error) { const funcName = "UserTransferSet" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid number of parameters, expected 1, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrapf(err, funcName) } listOfScriptTransfer, ok := rs[0].(Exprs) if !ok { return nil, errors.Errorf("%s: first argument expected to be Exprs, found %T", funcName, rs[0]) } var transfers []*ScriptTransferExpr for _, expr := range listOfScriptTransfer { if expr.InstanceOf() != "ScriptTransfer" { return nil, errors.Errorf("Expected instance of ScriptTransfer, found %s, %T", expr.InstanceOf(), expr) } transfers = append(transfers, expr.(*ScriptTransferExpr)) } return NewTransferSet(transfers...), nil } func ScriptTransfer(s Scope, e Exprs) (Expr, error) { const funcName = "ScriptTransfer" if l := len(e); l != 3 { return nil, errors.Errorf("%s: invalid number of parameters, expected 3, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrapf(err, funcName) } recipient, ok := rs[0].(Recipient) if !ok { return nil, errors.Errorf("%s: expected first argument to be 'RecipientExpr', got '%T'", funcName, rs[0]) } amount, ok := rs[1].(*LongExpr) if !ok { return nil, errors.Errorf("%s: expected second argument to be '*LongExpr', got '%T'", funcName, rs[1]) } return NewScriptTransfer(recipient, amount, rs[2]) } func ScriptResult(s Scope, e Exprs) (Expr, error) { const funcName = "ScriptResult" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid number of parameters, expected 2, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrapf(err, funcName) } writeSet, ok := rs[0].(*WriteSetExpr) if !ok { return nil, errors.Errorf("%s: expected first argument to be '*WriteSetExpr', got '%T'", funcName, rs[0]) } transferSet, ok := rs[1].(*TransferSetExpr) if !ok { return nil, errors.Errorf("%s: expected second argument to be '*TransferSetExpr', got '%T'", funcName, rs[1]) } return NewScriptResult(writeSet, transferSet), nil } // Issue is a constructor of IssueExpr type func Issue(s Scope, e Exprs) (Expr, error) { const funcName = "Issue" if l := len(e); l != 7 { return nil, errors.Errorf("%s: invalid number of parameters, expected 7, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } name, ok := rs[0].(*StringExpr) if !ok { return nil, errors.Errorf("%s: expected first argument to be '*StringExpr', got '%T'", funcName, rs[0]) } description, ok := rs[1].(*StringExpr) if !ok { return nil, errors.Errorf("%s: expected second argument to be '*StringExpr', got '%T'", funcName, rs[1]) } quantity, ok := rs[2].(*LongExpr) if !ok { return nil, errors.Errorf("%s: expected third argument to be '*LongExpr', got '%T'", funcName, rs[2]) } decimals, ok := rs[3].(*LongExpr) if !ok { return nil, errors.Errorf("%s: expected fourth argument to be '*LongExpr', got '%T'", funcName, rs[3]) } reissuable, ok := rs[4].(*BooleanExpr) if !ok { return nil, errors.Errorf("%s: expected 5th argument to be '*BooleanExpr', got '%T'", funcName, rs[4]) } //TODO: in V4
parameter #5 "script" is always Unit, reserved for future use, here we just check the type _, ok = rs[5].(*Unit) if !ok { return nil, errors.Errorf("%s: expected 6th argument to be 'Unit', got '%T'", funcName, rs[5]) } nonce, ok := rs[6].(*LongExpr) if !ok { return nil, errors.Errorf("%s: expected 7th argument to be '*LongExpr', got '%T'", funcName, rs[6]) } return NewIssueExpr(name.Value, description.Value, quantity.Value, decimals.Value, reissuable.Value, nonce.Value), nil } // Reissue is a constructor of ReissueExpr type func Reissue(s Scope, e Exprs) (Expr, error) { const funcName = "Reissue" if l := len(e); l != 3 { return nil, errors.Errorf("%s: invalid number of parameters, expected 3, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } assetID, ok := rs[0].(*BytesExpr) if !ok { return nil, errors.Errorf("%s: expected first argument to be '*BytesExpr', got '%T'", funcName, rs[0]) } reissuable, ok := rs[1].(*BooleanExpr) if !ok { return nil, errors.Errorf("%s: expected second argument to be '*BooleanExpr', got '%T'", funcName, rs[1]) } quantity, ok := rs[2].(*LongExpr) if !ok { return nil, errors.Errorf("%s: expected third argument to be '*LongExpr', got '%T'", funcName, rs[2]) } r, err := NewReissueExpr(assetID.Value, quantity.Value, reissuable.Value) if err != nil { return nil, errors.Wrap(err, funcName) } return r, nil } func Burn(s Scope, e Exprs) (Expr, error) { const funcName = "Burn" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid number of parameters, expected 2, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } assetID, ok := rs[0].(*BytesExpr) if !ok { return nil, errors.Errorf("%s: expected first argument to be '*BytesExpr', got '%T'", funcName, rs[0]) } quantity, ok := rs[1].(*LongExpr) if !ok { return nil, errors.Errorf("%s: expected second argument to be '*LongExpr', got '%T'", funcName, rs[1]) } r, err := NewBurnExpr(assetID.Value, quantity.Value) if err != nil { return nil, errors.Wrap(err, funcName) } return r, nil } func Contains(s Scope, e Exprs) (Expr, error) { const funcName = "Contains" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid number of parameters, expected 2, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } str, ok := rs[0].(*StringExpr) if !ok { return nil, errors.Errorf("%s: expected first argument of type '*StringExpr', got '%T'", funcName, rs[0]) } substr, ok := rs[1].(*StringExpr) if !ok { return nil, errors.Errorf("%s: expected second argument of type '*StringExpr', got '%T'", funcName, rs[1]) } return NewBoolean(strings.Contains(str.Value, substr.Value)), nil } func ValueOrElse(s Scope, e Exprs) (Expr, error) { const funcName = "ValueOrElse" if l := len(e); l != 2 { return nil, errors.Errorf("%s: invalid number of parameters, expected 2, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } if _, ok := rs[0].(*Unit); ok { return rs[1], nil } return rs[0], nil } func CalculateAssetID(s Scope, e Exprs) (Expr, error) { const funcName = "CalculateAssetID" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid number of parameters, expected 1, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } issue, ok := rs[0].(*IssueExpr) if !ok { return nil, errors.Errorf("%s: expected argument of type '*IssueExpr', 
got '%T'", funcName, rs[0]) } txID, ok := s.Value("txId") if !ok { return nil, errors.Errorf("%s: no txId in scope", funcName) } idb, ok := txID.(*BytesExpr) if !ok { return nil, errors.Errorf("%s: invalid type of txId: '%T'", funcName, txID) } d, err := crypto.NewDigestFromBytes(idb.Value) if err != nil { return nil, errors.Wrap(err, funcName) } id := proto.GenerateIssueScriptActionID(issue.Name, issue.Description, issue.Decimals, issue.Quantity, issue.Reissuable, issue.Nonce, d) return NewBytes(id.Bytes()), nil } func TransferFromProtobuf(s Scope, e Exprs) (Expr, error) { const funcName = "TransferFromProtobuf" if l := len(e); l != 1 { return nil, errors.Errorf("%s: invalid number of parameters, expected 1, received %d", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } bytesExpr, ok := rs[0].(*BytesExpr) if !ok { return nil, errors.Errorf("%s: expected argument of type *BytesExpr, got '%T'", funcName, rs[0]) } var tx proto.TransferWithProofs err = tx.UnmarshalSignedFromProtobuf(bytesExpr.Value) if err != nil { return nil, errors.Wrap(err, funcName) } err = tx.GenerateID(s.Scheme()) if err != nil { return nil, errors.Wrap(err, funcName) } //TODO: using scope's scheme is not quite correct here, because it should be possible to validate transfers from other networks obj, err := newVariablesFromTransferWithProofs(s.Scheme(), &tx) if err != nil { return nil, errors.Wrap(err, funcName) } return NewObject(obj), nil } func RebuildMerkleRoot(s Scope, e Exprs) (Expr, error) { const funcName = "RebuildMerkleRoot" if l := len(e); l != 3 { return nil, errors.Errorf("%s: invalid number of parameters %d, expected 3", funcName, l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, funcName) } proofsExpr, ok := rs[0].(Exprs) if !ok { return nil, errors.Errorf("%s: expected first argument of type Exprs, got '%T'", funcName, rs[0]) } if l := len(proofsExpr); l > 16 { return nil, errors.Errorf("%s: too many proofs %d, expected no more than 16", funcName, l) } proofs := make([]crypto.Digest, len(proofsExpr)) for i, x := range proofsExpr { b, ok := x.(*BytesExpr) if !ok { return nil, errors.Errorf("%s: unexpected element of type '%T' of proofs array at position %d", funcName, x, i) } d, err := crypto.NewDigestFromBytes(b.Value) if err != nil { return nil, errors.Wrap(err, funcName) } proofs[i] = d } leafExpr, ok := rs[1].(*BytesExpr) if !ok { return nil, errors.Errorf("%s: expected second argument of type *BytesExpr, got '%T'", funcName, rs[1]) } leaf, err := crypto.NewDigestFromBytes(leafExpr.Value) if err != nil { return nil, errors.Wrap(err, funcName) } indexExpr, ok := rs[2].(*LongExpr) if !ok { return nil, errors.Errorf("%s: expected third argument of type *LongExpr, got '%T'", funcName, rs[2]) } index := uint64(indexExpr.Value) tree, err := crypto.NewMerkleTree() if err != nil { return nil, errors.Wrap(err, funcName) } root := tree.RebuildRoot(leaf, proofs, index) return NewBytes(root[:]), nil } func limitedGroth16Verify(limit int) Callable { fn := "Groth16Verify" if limit > 0 { fn = fmt.Sprintf("%s_%dinputs", fn, limit) } return func(s Scope, e Exprs) (Expr, error) { if l := len(e); l != 3 { return nil, errors.Errorf("%s: invalid number of parameters %d, expected %d", fn, l, 3) } rs, err := e.EvaluateAll(s) if err != nil { return nil, errors.Wrap(err, fn) } key, ok := rs[0].(*BytesExpr) if !ok { return nil, errors.Errorf("%s: expected first argument of type *BytesExpr, received %T", fn, rs[0]) } proof, ok := rs[1].(*BytesExpr) 
if !ok { return nil, errors.Errorf("%s: expected second argument of type *BytesExpr, received %T", fn, rs[1]) } inputs, ok := rs[2].(*BytesExpr) if !ok { return nil, errors.Errorf("%s: expected third argument of type *BytesExpr, received %T", fn, rs[2]) } if l := len(inputs.Value); l > 32*limit { return nil, errors.Errorf("%s: invalid size of inputs %d bytes, must not exceed %d bytes", fn, l, limit*32) } ok, err = crypto.Groth16Verify(key.Value, proof.Value, inputs.Value) if err != nil { return nil, errors.Wrap(err, fn) } return NewBoolean(ok), nil }
func wrapWithExtract(c Callable, name string) Callable { return func(s Scope, e Exprs) (Expr, error) { rs, err := c(s, e) if err != nil { return nil, errors.Wrap(err, name) } if _, ok := rs.(*Unit); ok { return nil, Throw{Message: "failed to extract from Unit value"} } return rs, err } } func dataFromArray(s Scope, e Exprs) (Expr, error) { if l := len(e); l != 2 { return nil, errors.Errorf("invalid params, expected 2, passed %d", l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, err } lst, ok := rs[0].(Exprs) if !ok { return nil, errors.Errorf("expected first argument to be Exprs, found %T", rs[0]) } key, ok := rs[1].(*StringExpr) if !ok { return nil, errors.Errorf("expected second argument to be *StringExpr, found %T", rs[1]) } for i, e := range lst { item, ok := e.(Getable) if !ok { return nil, errors.Errorf("unexpected list element of type %T", e) } k, err := item.Get("key") if err != nil { return nil, errors.Wrapf(err, "%dth element doesn't have 'key' field", i) } b := key.Eq(k) if b { v, err := item.Get("value") if err != nil { return nil, errors.Wrapf(err, "%dth element doesn't have 'value' field", i) } return v, nil } } return NewUnit(), nil } func extractRecipientAndKey(s Scope, e Exprs) (proto.Recipient, string, error) { if l := len(e); l != 2 { return proto.Recipient{}, "", errors.Errorf("invalid params, expected 2, passed %d", l) } addOrAliasExpr, err := e[0].Evaluate(s) if err != nil { return proto.Recipient{}, "", err } var r proto.Recipient switch a := addOrAliasExpr.(type) { case *AliasExpr: r = proto.NewRecipientFromAlias(proto.Alias(*a)) case *AddressExpr: r = proto.NewRecipientFromAddress(proto.Address(*a)) case *RecipientExpr: r = proto.Recipient(*a) default: return proto.Recipient{}, "", errors.Errorf("expected first argument of type *AliasExpr, *AddressExpr or *RecipientExpr, found %T", addOrAliasExpr) } second, err := e[1].Evaluate(s) if err != nil { return proto.Recipient{}, "", err } key, ok := second.(*StringExpr) if !ok { return proto.Recipient{}, "", errors.Errorf("second argument expected to be *StringExpr, found %T", second) } return r, key.Value, nil } func dataFromArrayByIndex(s Scope, e Exprs) (Expr, error) { if l := len(e); l != 2 { return nil, errors.Errorf("invalid params, expected 2, passed %d", l) } rs, err := e.EvaluateAll(s) if err != nil { return nil, err } lst, ok := rs[0].(Exprs) if !ok { return nil, errors.Errorf("expected first argument to be Exprs, found %T", rs[0]) } index, ok := rs[1].(*LongExpr) if !ok { return nil, errors.Errorf("expected second argument to be *LongExpr, found %T", rs[1]) } i := int(index.Value) if i < 0 || i >= len(lst) { return nil, errors.Errorf("invalid index %d", i) } item, ok := lst[i].(Getable) if !ok { return nil, errors.Errorf("unexpected list element of type %T", lst[i]) } v, err := item.Get("value") if err != nil { return nil, errors.Wrapf(err, "%dth element doesn't have 'value' field", i) } return v, nil } func digest(e Expr) (Hash, error) { switch e.InstanceOf() { case "NoAlg": return 0, nil case "Md5": return MD5, nil case "Sha1": return SHA1, nil case "Sha224": return SHA224, nil case "Sha256": return SHA256, nil case "Sha384": return SHA384, nil case "Sha512": return SHA512, nil case "Sha3224": return SHA3_224, nil case "Sha3256": return SHA3_256, nil case "Sha3384": return SHA3_384, nil case "Sha3512": return SHA3_512, nil default: return 0, errors.Errorf("unsupported digest %s", e.InstanceOf()) } } func prefix(w io.Writer, name string, e Exprs) { _, _ = fmt.Fprintf(w, "%s(", name) last := len(e) - 1
for i := 0; i < len(e); i++ { e[i].Write(w) if last != i { _, _ = fmt.Fprint(w, ", ") } } _, _ = fmt.Fprint(w, ")") } func infix(w io.Writer, name string, e Exprs) { e[0].Write(w) _, _ = fmt.Fprintf(w, " %s ", name) e[1].Write(w) } func writeFunction(w io.Writer, id string, e Exprs) { switch id { case "0": infix(w, "==", e) case "1": prefix(w, "_isInstanceOf", e) case "2": prefix(w, "throw", e) case "103": infix(w, ">=", e) case "108": prefix(w, "pow", e) case "109": prefix(w, "log", e) case "200": prefix(w, "size", e) case "203", "300": infix(w, "+", e) case "305": prefix(w, "size", e) case "401": e[0].Write(w) _, _ = fmt.Fprint(w, "[") e[1].Write(w) _, _ = fmt.Fprint(w, "]") case "410", "411", "412": prefix(w, "toBytes", e) case "420", "421": prefix(w, "toString", e) case "500": prefix(w, "sigVerify", e) case "501": prefix(w, "keccak256", e) case "502": prefix(w, "blake2b256", e) case "503": prefix(w, "sha256", e) case "600": prefix(w, "toBase58String", e) case "601": prefix(w, "fromBase58String", e) case "1000": prefix(w, "transactionById", e) case "1001": prefix(w, "transactionHeightById", e) case "1003": prefix(w, "assetBalance", e) case "1060": prefix(w, "addressFromRecipient", e) default: prefix(w, fmt.Sprintf("FUNCTION_%s", id), e) // prefix already appends the opening parenthesis } }
}
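// A minimal sketch (not part of the original file) showing how a new built-in would follow
// this file's conventions: check the argument count, call EvaluateAll, type-assert each
// argument, and wrap errors with the function name. The name UserStringRepeat and its
// semantics are hypothetical, not a RIDE built-in.
func UserStringRepeat(s Scope, e Exprs) (Expr, error) {
	const funcName = "UserStringRepeat"
	if l := len(e); l != 2 {
		return nil, errors.Errorf("%s: invalid params, expected 2, passed %d", funcName, l)
	}
	rs, err := e.EvaluateAll(s)
	if err != nil {
		return nil, errors.Wrap(err, funcName)
	}
	str, ok := rs[0].(*StringExpr)
	if !ok {
		return nil, errors.Errorf("%s: first argument expected to be *StringExpr, found %T", funcName, rs[0])
	}
	n, ok := rs[1].(*LongExpr)
	if !ok {
		return nil, errors.Errorf("%s: second argument expected to be *LongExpr, found %T", funcName, rs[1])
	}
	if n.Value < 0 {
		return nil, errors.Errorf("%s: negative repeat count %d", funcName, n.Value)
	}
	return NewString(strings.Repeat(str.Value, int(n.Value))), nil
}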
climate.py
"""Platform for Flexit AC units with CI66 Modbus adapter.""" from __future__ import annotations import logging import voluptuous as vol from homeassistant.components.climate import ( PLATFORM_SCHEMA, ClimateEntity, ClimateEntityFeature, ) from homeassistant.components.climate.const import HVAC_MODE_COOL from homeassistant.components.modbus import get_hub from homeassistant.components.modbus.const import ( CALL_TYPE_REGISTER_HOLDING, CALL_TYPE_REGISTER_INPUT, CALL_TYPE_WRITE_REGISTER, CONF_HUB, DEFAULT_HUB, ) from homeassistant.components.modbus.modbus import ModbusHub from homeassistant.const import ( ATTR_TEMPERATURE, CONF_NAME, CONF_SLAVE, DEVICE_DEFAULT_NAME, TEMP_CELSIUS, ) from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_HUB, default=DEFAULT_HUB): cv.string, vol.Required(CONF_SLAVE): vol.All(int, vol.Range(min=0, max=32)), vol.Optional(CONF_NAME, default=DEVICE_DEFAULT_NAME): cv.string, } ) _LOGGER = logging.getLogger(__name__) async def async_setup_platform( hass: HomeAssistant, config: ConfigType, async_add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Flexit Platform.""" modbus_slave = config.get(CONF_SLAVE) name = config.get(CONF_NAME) hub = get_hub(hass, config[CONF_HUB]) async_add_entities([Flexit(hub, modbus_slave, name)], True)
class Flexit(ClimateEntity): """Representation of a Flexit AC unit.""" _attr_supported_features = ( ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.FAN_MODE ) def __init__( self, hub: ModbusHub, modbus_slave: int | None, name: str | None ) -> None: """Initialize the unit.""" self._hub = hub self._name = name self._slave = modbus_slave self._target_temperature = None self._current_temperature = None self._current_fan_mode = None self._current_operation = None self._fan_modes = ["Off", "Low", "Medium", "High"] self._filter_hours = None self._filter_alarm = None self._heat_recovery = None self._heater_enabled = False self._heating = None self._cooling = None self._alarm = False self._outdoor_air_temp = None async def async_update(self): """Update unit attributes.""" self._target_temperature = await self._async_read_temp_from_register( CALL_TYPE_REGISTER_HOLDING, 8 ) self._current_temperature = await self._async_read_temp_from_register( CALL_TYPE_REGISTER_INPUT, 9 ) res = await self._async_read_int16_from_register(CALL_TYPE_REGISTER_HOLDING, 17) if res < len(self._fan_modes): self._current_fan_mode = res self._filter_hours = await self._async_read_int16_from_register( CALL_TYPE_REGISTER_INPUT, 8 ) # Mechanical heat recovery, 0-100% self._heat_recovery = await self._async_read_int16_from_register( CALL_TYPE_REGISTER_INPUT, 14 ) # Heater active 0-100% self._heating = await self._async_read_int16_from_register( CALL_TYPE_REGISTER_INPUT, 15 ) # Cooling active 0-100% self._cooling = await self._async_read_int16_from_register( CALL_TYPE_REGISTER_INPUT, 13 ) # Filter alarm 0/1 self._filter_alarm = await self._async_read_int16_from_register( CALL_TYPE_REGISTER_INPUT, 27 ) # Heater enabled or not. Does not mean it's necessarily heating self._heater_enabled = await self._async_read_int16_from_register( CALL_TYPE_REGISTER_INPUT, 28 ) self._outdoor_air_temp = await self._async_read_temp_from_register( CALL_TYPE_REGISTER_INPUT, 11 ) actual_air_speed = await self._async_read_int16_from_register( CALL_TYPE_REGISTER_INPUT, 48 ) if self._heating: self._current_operation = "Heating" elif self._cooling: self._current_operation = "Cooling" elif self._heat_recovery: self._current_operation = "Recovering" elif actual_air_speed: self._current_operation = "Fan Only" else: self._current_operation = "Off" @property def extra_state_attributes(self): """Return device specific state attributes.""" return { "filter_hours": self._filter_hours, "filter_alarm": self._filter_alarm, "heat_recovery": self._heat_recovery, "heating": self._heating, "heater_enabled": self._heater_enabled, "cooling": self._cooling, "outdoor_air_temp": self._outdoor_air_temp, } @property def should_poll(self): """Return the polling state.""" return True @property def name(self): """Return the name of the climate device.""" return self._name @property def temperature_unit(self): """Return the unit of measurement.""" return TEMP_CELSIUS @property def current_temperature(self): """Return the current temperature.""" return self._current_temperature @property def target_temperature(self): """Return the temperature we try to reach.""" return self._target_temperature @property def hvac_mode(self): """Return current operation, i.e. heat, cool, idle.""" return self._current_operation @property def hvac_modes(self) -> list[str]: """Return the list of available hvac operation modes. Need to be a subset of HVAC_MODES.
""" return [HVAC_MODE_COOL] @property def fan_mode(self): """Return the fan setting.""" return self._current_fan_mode @property def fan_modes(self): """Return the list of available fan modes.""" return self._fan_modes async def async_set_temperature(self, **kwargs): """Set new target temperature.""" if kwargs.get(ATTR_TEMPERATURE) is not None: target_temperature = kwargs.get(ATTR_TEMPERATURE) else: _LOGGER.error("Received invalid temperature") return if await self._async_write_int16_to_register(8, target_temperature * 10): self._target_temperature = target_temperature else: _LOGGER.error("Modbus error setting target temperature to Flexit") async def async_set_fan_mode(self, fan_mode): """Set new fan mode.""" if await self._async_write_int16_to_register( 17, self.fan_modes.index(fan_mode) ): self._current_fan_mode = self.fan_modes.index(fan_mode) else: _LOGGER.error("Modbus error setting fan mode to Flexit") # Based on _async_read_register in ModbusThermostat class async def _async_read_int16_from_register(self, register_type, register) -> int: """Read register using the Modbus hub slave.""" result = await self._hub.async_pymodbus_call( self._slave, register, 1, register_type ) if result is None: _LOGGER.error("Error reading value from Flexit modbus adapter") return -1 return int(result.registers[0]) async def _async_read_temp_from_register(self, register_type, register) -> float: result = float( await self._async_read_int16_from_register(register_type, register) ) if result == -1: return -1 return result / 10.0 async def _async_write_int16_to_register(self, register, value) -> bool: value = int(value) result = await self._hub.async_pymodbus_call( self._slave, register, value, CALL_TYPE_WRITE_REGISTER ) if result == -1: return False return True
app.js
var express = require('express'); var path = require('path'); var favicon = require('serve-favicon'); var logger = require('morgan'); var cookieParser = require('cookie-parser'); var bodyParser = require('body-parser'); require("babel-core/register"); var app = express();
app.set('views', path.join(__dirname, 'views')); app.set('view engine', 'ejs'); require('./devServer').init(app); // uncomment after placing your favicon in /public //app.use(favicon(__dirname + '/public/favicon.ico')); app.use(logger('dev')); app.use(bodyParser.json()); app.use(bodyParser.urlencoded({ extended: false })); app.use(cookieParser()); app.use(express.static(path.join(__dirname, 'public'))); //app.use(express.static(path.join(__dirname, 'prototypes'))); app.use('/', require('./routes/index')); app.use('/', require('./routes/product')); app.use('/', require('./routes/price')); app.use('/', require('./middlewares/appHandler')); var errorHandlers = require('./middlewares/errorHandlers'); app.use(errorHandlers.handle404); app.use(errorHandlers.handle); module.exports = app;
// view engine setup
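// A minimal sketch of what ./middlewares/errorHandlers could export, assuming the
// conventional Express signatures (the real module may differ): handle404 is a plain
// middleware, while handle must take four arguments so Express treats it as an error handler.
// module.exports.handle404 = function (req, res, next) {
//   res.status(404).render('error', { message: 'Not Found' });
// };
// module.exports.handle = function (err, req, res, next) {
//   res.status(err.status || 500).render('error', { message: err.message });
// };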
cache.go
// Copyright 2014 beego Author. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Package cache provides a Cache interface and some implementation engines. // Usage: // // import( // "github.com/astaxie/beego/cache" // ) // // bm, err := cache.NewCache("memory", `{"interval":60}`) // // Use it like this: // // bm.Put("astaxie", 1, 10 * time.Second) // bm.Get("astaxie") // bm.IsExist("astaxie") // bm.Delete("astaxie") // // more docs: http://beego.me/docs/module/cache.md package cache import ( "fmt"
// usage: // cache.Register("file",cache.NewFileCache) // this operation is run in init method of file.go. // c,err := cache.NewCache("file","{....}") // c.Put("key",value, 3600 * time.Second) // v := c.Get("key") // // c.Incr("counter") // now is 1 // c.Incr("counter") // now is 2 // count := c.Get("counter").(int) type Cache interface { // get cached value by key. Get(key string) interface{} // GetMulti is a batch version of Get. GetMulti(keys []string) []interface{} // set cached value with key and expire time. Put(key string, val interface{}, timeout time.Duration) error // delete cached value by key. Delete(key string) error // increase cached int value by key, as a counter. Incr(key string) error // decrease cached int value by key, as a counter. Decr(key string) error // check if cached value exists or not. IsExist(key string) bool // clear all cache. ClearAll() error // start gc routine based on config string settings. StartAndGC(config string) error } // Instance is a function that creates a new Cache instance. type Instance func() Cache var adapters = make(map[string]Instance) // Register makes a cache adapter available by the adapter name. // If Register is called twice with the same name or if driver is nil, // it panics. func Register(name string, adapter Instance) { if adapter == nil { panic("cache: Register adapter is nil") } if _, ok := adapters[name]; ok { panic("cache: Register called twice for adapter " + name) } adapters[name] = adapter } // NewCache creates a new cache driver by adapter name and config string. // config needs to be valid JSON as a string: {"interval":360}. // It will start GC automatically. func NewCache(adapterName, config string) (adapter Cache, err error) { instanceFunc, ok := adapters[adapterName] if !ok { err = fmt.Errorf("cache: unknown adapter name %q (forgot to import?)", adapterName) return } adapter = instanceFunc() err = adapter.StartAndGC(config) if err != nil { adapter = nil } return }
"time" ) // Cache interface contains all behaviors for cache adapter.
CoinTypeSelector.js
import React, { PureComponent } from 'react'; import { FlatList, Image, View, Text, TouchableWithoutFeedback } from 'react-native'; import Ionicons from 'react-native-vector-icons/Ionicons'; import { connect } from 'react-redux'; import { ifIphoneX } from 'react-native-iphone-x-helper'; import * as _ from 'lodash'; import Header from '../molecules/Header'; import { staticLabelColor, brandColor } from '../commonColors'; import { OBLightModal } from '../templates/OBModal'; import { COINS } from '../../utils/coins'; import CoinListItem from '../molecules/CoinListItem'; import NavCloseButton from '../atoms/NavCloseButton'; import { minUnitAmountToBCH } from '../../utils/currency'; import { convertorsMap } from '../../selectors/currency'; const styles = { wrapper: { flexDirection: 'row', alignItems: 'center', height: ifIphoneX(94, 60), paddingBottom: ifIphoneX(34, 0), backgroundColor: 'white', }, wrapperBorder: { borderTopColor: '#e8e8e8', borderTopWidth: 1, }, optionTrigger: { flexDirection: 'row', justifyContent: 'space-between', alignItems: 'center', padding: 15, }, leftWrapper: { alignItems: 'flex-start', justifyContent: 'space-between', paddingLeft: 8, }, icon: { width: 30, height: 30,
fontWeight: 'bold', letterSpacing: 0, color: '#000000', }, secondary: { fontSize: 12, letterSpacing: 0, marginTop: 2, color: staticLabelColor, }, iconUp: { paddingRight: 15, }, iconWithBalance: { paddingLeft: 13, }, rightWrapper: { alignItems: 'flex-end', justifyContent: 'space-between', }, priceByLocalCurrency: { fontSize: 15, fontWeight: 'bold', fontStyle: 'normal', letterSpacing: 0, color: brandColor, }, }; class CoinTypeSelector extends PureComponent { constructor(props) { super(props); this.state = { coin: props.coin, showModal: false, }; } keyExtractor = (item, index) => `coin_item_${index}` handleShowModal = () => { this.setState({ showModal: true }); }; handleCoinSelected = (coin) => { this.setState( { coin, showModal: false }, () => { this.props.onChange(coin); }, ); } renderItem = ({ item }) => { const { coin } = this.props; const childProps = { coin: item, onCoinSelected: this.handleCoinSelected, }; return ( <CoinListItem {...childProps} selected={coin === item} /> ); } render() { const { coin, showModal } = this.state; const { localLabelFromLocal, convertBalanceFromBCH, showBalance, noBorder } = this.props; const balance = !COINS[coin].disabled && this.props.balance[coin]; const { label, icon, disabled } = COINS[coin]; const { cBalance } = convertBalanceFromBCH(balance, coin); return ( <View style={[styles.wrapper, !noBorder && styles.wrapperBorder]}> <TouchableWithoutFeedback onPress={this.handleShowModal}> <View style={styles.optionTrigger}> <Image style={styles.icon} source={icon} resizeMode="cover" /> <View style={styles.leftWrapper}> <Text style={styles.coinName}>{label}</Text> <Text style={styles.secondary}>{coin}</Text> </View> <View style={{ flex: 1 }} /> {showBalance && ( <View style={styles.rightWrapper}> <Text style={styles.priceByLocalCurrency}> {localLabelFromLocal(cBalance)} </Text> <Text style={styles.secondary}> {(disabled || !balance) ? 'Coming Soon' : `${minUnitAmountToBCH(balance.confirmed, coin)} ${coin}`} </Text> </View> )} <Ionicons style={showBalance ? styles.iconWithBalance : styles.iconUp} name="ios-arrow-up" size={18} color={staticLabelColor} /> </View> </TouchableWithoutFeedback> <OBLightModal animationType="slide" transparent visible={showModal} onRequestClose={() => {}} > <Header modal left={<NavCloseButton />} onLeft={() => this.setState({ showModal: false })} /> <FlatList keyExtractor={this.keyExtractor} data={Object.keys(COINS)} renderItem={this.renderItem} /> </OBLightModal> </View> ); } } const mapStateToProps = state => ({ balance: state.wallet.balance, ...convertorsMap(state), }); export default connect(mapStateToProps)(CoinTypeSelector);
}, coinName: { fontSize: 14,
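// A minimal usage sketch (hypothetical parent component); balances come from the redux
// store inside the selector, so the parent only passes the coin and display props used above.
// <CoinTypeSelector
//   coin="BCH"
//   showBalance
//   noBorder={false}
//   onChange={coin => this.setState({ coin })}
// />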
505 The Maze II.py
#!/usr/bin/python3 """ premium question """ from typing import List import heapq dirs = [(0, -1), (0, 1), (-1, 0), (1, 0)] class Solution: def shortestDistance(self, maze: List[List[int]], start: List[int], destination: List[int]) -> int: """ Frictionless rolling ball: it keeps rolling until it hits a wall F[i][j][dir] = min distance given direction S[i][j] = whether stoppable Dijkstra's algorithm: reduce to a graph problem
D = [[float("inf") for _ in range(n)] for _ in range(m)] # distance matrix i, j = start D[i][j] = 0 q = [(0, i, j)] while q: dist, i, j = heapq.heappop(q) for di, dj in dirs: cur_dist = 0 I = i J = j # look ahead while 0 <= I + di < m and 0 <= J + dj < n and maze[I + di][J + dj] == 0: I += di J += dj cur_dist += 1 if dist + cur_dist < D[I][J]: D[I][J] = dist + cur_dist heapq.heappush(q, (D[I][J], I, J)) i, j = destination return D[i][j] if D[i][j] != float("inf") else -1 if __name__ == "__main__": assert Solution().shortestDistance([[0,0,1,0,0],[0,0,0,0,0],[0,0,0,1,0],[1,1,0,1,1],[0,0,0,0,0]], [0,4], [4,4]) == 12
""" m, n = len(maze), len(maze[0])
u32.rs
use crate::shape::compute_strides; use crate::shape::get_size; use crate::tensor::Tensor; use js_sys::Float32Array; use js_sys::Int32Array; use js_sys::Uint32Array; use wasm_bindgen::prelude::*; #[wasm_bindgen] #[derive(Debug, Clone)] pub struct TensorU32 { tensor: Tensor<u32>, } type Elem = u32; type Sel = TensorU32; impl TensorU32 { pub fn get_tensor(&self) -> &Tensor<u32> { &self.tensor } } #[wasm_bindgen] impl TensorU32 { pub fn create(shape: Uint32Array, values: Uint32Array) -> TensorU32 { let mut _shape: Vec<usize> = vec![0; shape.length() as usize]; for i in 0.._shape.len() { _shape[i] = shape.get_index(i as u32) as usize; } let strides = compute_strides(&_shape); let size = get_size(&_shape); let mut _values: Vec<u32> = vec![0; values.length() as usize]; for i in 0.._values.len() { _values[i] = values.get_index(i as u32); } TensorU32 { tensor: Tensor::new(_shape, strides, size, _values), } } pub fn create_constant(shape: Uint32Array, value: u32) -> TensorU32 { let mut _shape: Vec<usize> = vec![0; shape.length() as usize]; for i in 0.._shape.len() { _shape[i] = shape.get_index(i as u32) as usize; } let strides = compute_strides(&_shape); let size = get_size(&_shape); let values = vec![value; size]; TensorU32 { tensor: Tensor::new(_shape, strides, size, values), } } pub fn get_vals(&self) -> Uint32Array { let arr = Uint32Array::new_with_length(self.tensor.size as u32); for i in 0..self.tensor.size { arr.set_index(i as u32, self.tensor.get_ix(i)); } return arr; } pub fn get_shape(&self) -> Uint32Array { let arr = Uint32Array::new_with_length(self.tensor.rank() as u32); for i in 0..self.tensor.rank() { arr.set_index(i as u32, self.tensor.get_dim_size(i) as u32); } return arr; } pub fn power_scalar(&self, power: u32, factor: u32) -> TensorU32 { TensorU32 { tensor: self.tensor.power_scalar_int(power, factor), } } pub fn add_multiply_scalar(&self, factor: u32, add: u32) -> TensorU32 { TensorU32 { tensor: self.tensor.add_multiply_scalar(factor, add), } } pub fn clip(&self, min: u32, max: u32) -> TensorU32 { TensorU32 { tensor: self.tensor.clip(min, max), } } pub fn clip_min(&self, min: u32) -> TensorU32 { TensorU32 { tensor: self.tensor.clip_min(min), } }
pub fn clip_max(&self, max: u32) -> TensorU32 { TensorU32 { tensor: self.tensor.clip_max(max), } } pub fn power(&self, other: &TensorU32) -> TensorU32 { TensorU32 { tensor: self.tensor.power_int(&other.tensor), } } pub fn addition(&self, other: &TensorU32, alpha: u32, beta: u32) -> TensorU32 { TensorU32 { tensor: self.tensor.addition(&other.tensor, alpha, beta), } } pub fn subtraction(&self, other: &TensorU32, alpha: u32, beta: u32) -> TensorU32 { TensorU32 { tensor: self.tensor.subtraction(&other.tensor, alpha, beta), } } pub fn multiply(&self, other: &TensorU32, alpha: u32) -> TensorU32 { TensorU32 { tensor: self.tensor.multiply(&other.tensor, alpha), } } pub fn divide(&self, other: &TensorU32, alpha: u32) -> TensorU32 { TensorU32 { tensor: self.tensor.divide(&other.tensor, alpha), } } pub fn clip_backward(&self, min: u32, max: u32, grad: &TensorU32) -> TensorU32 { TensorU32 { tensor: self.tensor.clip_backward(min, max, &grad.tensor), } } pub fn clip_min_backward(&self, min: u32, grad: &TensorU32) -> TensorU32 { TensorU32 { tensor: self.tensor.clip_min_backward(min, &grad.tensor), } } pub fn clip_max_backward(&self, max: u32, grad: &TensorU32) -> TensorU32 { TensorU32 { tensor: self.tensor.clip_max_backward(max, &grad.tensor), } } pub fn sum(&self, axes: Uint32Array, keep_dims: bool) -> TensorU32 { TensorU32 { tensor: self.tensor.sum(axes, keep_dims), } } pub fn sum_square(&self, axes: Uint32Array, keep_dims: bool) -> TensorU32 { TensorU32 { tensor: self.tensor.sum_square(axes, keep_dims), } } pub fn product(&self, axes: Uint32Array, keep_dims: bool) -> TensorU32 { TensorU32 { tensor: self.tensor.product(axes, keep_dims), } } pub fn max(&self, axes: Uint32Array, keep_dims: bool) -> TensorU32 { TensorU32 { tensor: self.tensor.max(axes, keep_dims), } } pub fn min(&self, axes: Uint32Array, keep_dims: bool) -> TensorU32 { TensorU32 { tensor: self.tensor.min(axes, keep_dims), } } pub fn reduce_mean(&self, axes: Uint32Array, keep_dims: bool) -> TensorU32 { TensorU32 { tensor: self.tensor.reduce_mean(axes, keep_dims), } } pub fn reduce_mean_square(&self, axes: Uint32Array, keep_dims: bool) -> TensorU32 { TensorU32 { tensor: self.tensor.reduce_mean_square(axes, keep_dims), } } pub fn conv( &self, kernel: &TensorU32, dilations: Uint32Array, group: u32, pads: Uint32Array, strides: Uint32Array, activation: u32, ) -> TensorU32 { TensorU32 { tensor: self .tensor .conv(&kernel.tensor, dilations, group, pads, strides, activation), } } pub fn conv_with_bias( &self, kernel: &TensorU32, bias: &TensorU32, dilations: Uint32Array, group: u32, pads: Uint32Array, strides: Uint32Array, activation: u32, ) -> TensorU32 { TensorU32 { tensor: self.tensor.conv_with_bias( &kernel.tensor, &bias.tensor, dilations, group, pads, strides, activation, ), } } pub fn conv_transpose( &self, kernel: &TensorU32, dilations: Uint32Array, group: u32, pads: Uint32Array, strides: Uint32Array, ) -> TensorU32 { TensorU32 { tensor: self .tensor .conv_transpose(&kernel.tensor, dilations, group, pads, strides), } } pub fn average_pool( &self, kernel_shape: Uint32Array, pads: Uint32Array, strides: Uint32Array, include_pad: bool, ) -> TensorU32 { TensorU32 { tensor: self .tensor .average_pool(kernel_shape, pads, strides, include_pad), } } // Mode: 0 == constant, 1 == reflect, 2 == edge pub fn pad(&self, pads: Uint32Array, mode: i32, value: u32) -> TensorU32 { TensorU32 { tensor: self.tensor.pad(pads, mode, value), } } pub fn upsample(&self, scales: Float32Array) -> TensorU32 { TensorU32 { tensor: self.tensor.upsample(scales), } } pub fn 
matmul(&self, other: &TensorU32) -> TensorU32 { TensorU32 { tensor: self.tensor.matmul(&other.tensor), } } pub fn gemm( &self, other: &TensorU32, a_transpose: bool, b_transpose: bool, alpha: u32, ) -> TensorU32 { TensorU32 { tensor: self .tensor .gemm(&other.tensor, a_transpose, b_transpose, alpha), } } pub fn gemm_with_c( &self, other: &TensorU32, a_transpose: bool, b_transpose: bool, alpha: u32, c: &TensorU32, beta: u32, ) -> TensorU32 { TensorU32 { tensor: self.tensor.gemm_with_c( &other.tensor, a_transpose, b_transpose, alpha, &c.tensor, beta, ), } } pub fn set_values(&self, values: &TensorU32, starts: Uint32Array) -> TensorU32 { TensorU32 { tensor: self.tensor.set_values(&values.tensor, starts), } } pub fn reshape(&self, shape: Uint32Array) -> TensorU32 { TensorU32 { tensor: self.tensor.reshape(shape), } } pub fn concat(&self, other: &TensorU32, axes: u32) -> TensorU32 { TensorU32 { tensor: self.tensor.concat(&other.tensor, axes), } } pub fn transpose(&self, permutation: Uint32Array) -> TensorU32 { TensorU32 { tensor: self.tensor.transpose(permutation), } } pub fn repeat(&self, repeats: Uint32Array) -> TensorU32 { TensorU32 { tensor: self.tensor.repeat(repeats), } } pub fn expand(&self, shape: Uint32Array) -> TensorU32 { TensorU32 { tensor: self.tensor.expand(shape), } } pub fn copy(&self) -> TensorU32 { TensorU32 { tensor: self.tensor.copy(), } } // Gather values along the given axis at the provided indices; indice_shape describes the shape of the indices tensor pub fn gather(&self, axis: i32, indices: Uint32Array, indice_shape: Uint32Array) -> TensorU32 { TensorU32 { tensor: self.tensor.gather(axis, indices, indice_shape), } } pub fn slice( &self, starts: Uint32Array, ends: Uint32Array, axis: Uint32Array, steps: Int32Array, ) -> TensorU32 { TensorU32 { tensor: self.tensor.slice(starts, ends, axis, steps), } } pub fn reshape_sparse_indices( &self, old_sparse_shape: Uint32Array, new_shape: Uint32Array, ) -> TensorU32 { TensorU32 { tensor: self .tensor .reshape_sparse_indices(old_sparse_shape, new_shape), } } pub fn add_index(&self, axis: i32, count: i32) -> TensorU32 { TensorU32 { tensor: self.tensor.add_index(axis, count), } } pub fn repeat_sparse_indices( &self, repeats: Uint32Array, shape: Uint32Array, repeats_prod: u32, ) -> TensorU32 { TensorU32 { tensor: self .tensor .repeat_sparse_indices(repeats, shape, repeats_prod), } } pub fn matmul_sparse_dense(&self, indices: &TensorU32, b: &TensorU32, m: usize) -> TensorU32 { TensorU32 { tensor: self .tensor .matmul_sparse_dense(&indices.tensor, &b.tensor, m), } } pub fn add_sparse_dense( &self, indices: &TensorU32, b: &Sel, result_shape: Uint32Array, alpha: Elem, beta: Elem, ) -> Self { Self { tensor: self.tensor.add_sparse_dense( indices.get_tensor(), &b.tensor, result_shape, alpha, beta, ), } } pub fn subtract_sparse_dense( &self, indices: &TensorU32, b: &Sel, result_shape: Uint32Array, alpha: Elem, beta: Elem, ) -> Self { Self { tensor: self.tensor.subtract_sparse_dense( indices.get_tensor(), &b.tensor, result_shape, alpha, beta, ), } } pub fn multiply_sparse_dense( &self, indices: &TensorU32, b: &Sel, result_shape: Uint32Array, alpha: Elem, ) -> Self { Self { tensor: self.tensor.multiply_sparse_dense( indices.get_tensor(), &b.tensor, result_shape, alpha, ), } } pub fn divide_sparse_dense( &self, indices: &TensorU32, b: &Sel, result_shape: Uint32Array, alpha: Elem, ) -> Self { Self { tensor: self.tensor.divide_sparse_dense( indices.get_tensor(), &b.tensor, result_shape, alpha, ), } } pub fn add_sparse_sparse( &self, indices: &TensorU32, b_indices: &TensorU32, b_values: &Sel, result_shape: Uint32Array,
alpha: Elem, beta: Elem, ) -> Self { Self { tensor: self.tensor.add_sparse_sparse( indices.get_tensor(), b_indices.get_tensor(), &b_values.tensor, result_shape, alpha, beta, ), } } pub fn subtract_sparse_sparse( &self, indices: &TensorU32, b_indices: &TensorU32, b_values: &Sel, result_shape: Uint32Array, alpha: Elem, beta: Elem, ) -> Self { Self { tensor: self.tensor.subtract_sparse_sparse( indices.get_tensor(), b_indices.get_tensor(), &b_values.tensor, result_shape, alpha, beta, ), } } pub fn divide_sparse_sparse( &self, indices: &TensorU32, b_indices: &TensorU32, b_values: &Sel, result_shape: Uint32Array, alpha: Elem, ) -> Self { Self { tensor: self.tensor.divide_sparse_sparse( indices.get_tensor(), b_indices.get_tensor(), &b_values.tensor, result_shape, alpha, ), } } pub fn multiply_sparse_sparse( &self, indices: &TensorU32, b_indices: &TensorU32, b_values: &Sel, result_shape: Uint32Array, alpha: Elem, ) -> Self { Self { tensor: self.tensor.multiply_sparse_sparse( indices.get_tensor(), b_indices.get_tensor(), &b_values.tensor, result_shape, alpha, ), } } pub fn sum_sparse( &self, shape: Uint32Array, indices: &TensorU32, axes: Uint32Array, keep_dims: bool, ) -> Self { Self { tensor: self .tensor .sum_sparse(shape, indices.get_tensor(), axes, keep_dims), } } pub fn sum_square_sparse( &self, shape: Uint32Array, indices: &TensorU32, axes: Uint32Array, keep_dims: bool, ) -> Self { Self { tensor: self .tensor .sum_square_sparse(shape, indices.get_tensor(), axes, keep_dims), } } pub fn reduce_mean_sparse( &self, shape: Uint32Array, indices: &TensorU32, axes: Uint32Array, keep_dims: bool, ) -> Self { Self { tensor: self .tensor .reduce_mean_sparse(shape, indices.get_tensor(), axes, keep_dims), } } pub fn product_sparse( &self, shape: Uint32Array, indices: &TensorU32, axes: Uint32Array, keep_dims: bool, ) -> Self { Self { tensor: self .tensor .product_sparse(shape, indices.get_tensor(), axes, keep_dims), } } pub fn max_sparse( &self, shape: Uint32Array, indices: &TensorU32, axes: Uint32Array, keep_dims: bool, ) -> Self { Self { tensor: self .tensor .max_sparse(shape, indices.get_tensor(), axes, keep_dims), } } pub fn min_sparse( &self, shape: Uint32Array, indices: &TensorU32, axes: Uint32Array, keep_dims: bool, ) -> Self { Self { tensor: self .tensor .min_sparse(shape, indices.get_tensor(), axes, keep_dims), } } pub fn reduce_mean_squared_sparse( &self, shape: Uint32Array, indices: &TensorU32, axes: Uint32Array, keep_dims: bool, ) -> Self { Self { tensor: self.tensor.reduce_mean_squared_sparse( shape, indices.get_tensor(), axes, keep_dims, ), } } }
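// A minimal usage sketch from the JavaScript side of the wasm-bindgen boundary
// (hypothetical consumer code, shown as a comment; the element-wise alpha/beta
// scaling semantics of addition are an assumption, not confirmed by this file):
//
//   const shape = new Uint32Array([2, 2]);
//   const a = TensorU32.create(shape, new Uint32Array([1, 2, 3, 4]));
//   const b = TensorU32.create_constant(shape, 10);
//   const sum = a.addition(b, 1, 1); // alpha = 1, beta = 1
//   console.log(sum.get_vals());     // [11, 12, 13, 14] if alpha/beta scale the two operands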
types.ts
export default { UserRepository: Symbol.for('USER_REPOSITORY'),
};
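// A minimal sketch of how such a symbol map is typically consumed with an
// Inversify-style IoC container; the Container setup, UserRepository and
// UserRepositoryImpl below are hypothetical.
// import { Container } from 'inversify';
// import TYPES from './types';
//
// const container = new Container();
// container.bind<UserRepository>(TYPES.UserRepository).to(UserRepositoryImpl);
// const repo = container.get<UserRepository>(TYPES.UserRepository);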