file_name (string, 3-137 chars) | prefix (string, 0-918k chars) | suffix (string, 0-962k chars) | middle (string, 0-812k chars)
---|---|---|---
test_auto_DataGrabber.py | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from ...testing import assert_equal
from ..io import DataGrabber
def test_DataGrabber_inputs():
input_map = dict(base_directory=dict(),
ignore_exception=dict(nohash=True,
usedefault=True,
),
raise_on_empty=dict(usedefault=True,
),
sort_filelist=dict(mandatory=True,
),
template=dict(mandatory=True,
),
template_args=dict(),
)
inputs = DataGrabber.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_DataGrabber_outputs(): | output_map = dict()
outputs = DataGrabber.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
yield assert_equal, getattr(outputs.traits()[key], metakey), value | |
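The generated test above relies on nose's generator-test idiom: each `yield assert_equal, a, b` tuple is collected by the runner and executed as its own test case. A minimal self-contained sketch of that idiom, using a hypothetical `FakeTrait` stand-in for nipype's trait objects:

```python
# Sketch of the nose-style generator-test idiom used above. FakeTrait is a
# hypothetical stand-in for a nipype trait; the yield protocol is the point.
class FakeTrait(object):
    mandatory = True

def assert_equal(a, b):
    assert a == b, "%r != %r" % (a, b)

def test_fake_trait_metadata():
    traits = {"template": FakeTrait()}
    expected = dict(template=dict(mandatory=True))
    for key, metadata in expected.items():
        for metakey, value in metadata.items():
            # nose runs each yielded (func, *args) tuple as one test
            yield assert_equal, getattr(traits[key], metakey), value
```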
rdn.py | # Residual Dense Network for Image Super-Resolution
# https://arxiv.org/abs/1802.08797
# modified from: https://github.com/thstkdgus35/EDSR-PyTorch
from argparse import Namespace
import torch
import torch.nn as nn
from models import register
class RDB_Conv(nn.Module):
def __init__(self, inChannels, growRate, kSize=3):
super(RDB_Conv, self).__init__()
Cin = inChannels
G = growRate
self.conv = nn.Sequential(*[
#nn.Conv2d(Cin, G, kSize, padding=(kSize-1)//2, stride=1),
nn.Conv3d(Cin, G, kSize, padding=(kSize - 1) // 2, stride=1),
nn.ReLU()
])
def forward(self, x):
out = self.conv(x)
return torch.cat((x, out), 1)
class RDB(nn.Module):
def __init__(self, growRate0, growRate, nConvLayers, kSize=3):
super(RDB, self).__init__()
G0 = growRate0
G = growRate
C = nConvLayers
convs = []
for c in range(C):
convs.append(RDB_Conv(G0 + c*G, G))
self.convs = nn.Sequential(*convs)
# Local Feature Fusion
self.LFF = nn.Conv3d(G0 + C * G, G0, 1, padding=0, stride=1)
#self.LFF = nn.Conv2d(G0 + C*G, G0, 1, padding=0, stride=1)
def forward(self, x):
return self.LFF(self.convs(x)) + x
class RDN(nn.Module):
def __init__(self, args):
super(RDN, self).__init__()
self.args = args
r = args.scale[0]
G0 = args.G0
kSize = args.RDNkSize
# number of RDB blocks, conv layers, out channels
self.D, C, G = {
'A': (20, 6, 32),
'B': (16, 8, 64),
}[args.RDNconfig]
# Shallow feature extraction net
#self.SFENet1 = nn.Conv2d(args.n_colors, G0, kSize, padding=(kSize-1)//2, stride=1)
#self.SFENet2 = nn.Conv2d(G0, G0, kSize, padding=(kSize-1)//2, stride=1)
self.SFENet1 = nn.Conv3d(args.n_colors, G0, kSize, padding=(kSize-1)//2, stride=1)
self.SFENet2 = nn.Conv3d(G0, G0, kSize, padding=(kSize-1)//2, stride=1)
# Residual dense blocks and dense feature fusion
self.RDBs = nn.ModuleList()
for i in range(self.D):
self.RDBs.append(
RDB(growRate0 = G0, growRate = G, nConvLayers = C)
)
# Global Feature Fusion
self.GFF = nn.Sequential(*[
#nn.Conv2d(self.D * G0, G0, 1, padding=0, stride=1),
#nn.Conv2d(G0, G0, kSize, padding=(kSize-1)//2, stride=1)
nn.Conv3d(self.D * G0, G0, 1, padding=0, stride=1),
nn.Conv3d(G0, G0, kSize, padding=(kSize - 1) // 2, stride=1)
])
if args.no_upsampling:
self.out_dim = G0
else:
self.out_dim = args.n_colors
# Up-sampling net
if r == 2 or r == 3:
self.UPNet = nn.Sequential(*[
nn.Conv2d(G0, G * r * r, kSize, padding=(kSize-1)//2, stride=1),
nn.PixelShuffle(r),
nn.Conv2d(G, args.n_colors, kSize, padding=(kSize-1)//2, stride=1)
])
elif r == 4:
self.UPNet = nn.Sequential(*[
nn.Conv2d(G0, G * 4, kSize, padding=(kSize-1)//2, stride=1),
nn.PixelShuffle(2),
nn.Conv2d(G, G * 4, kSize, padding=(kSize-1)//2, stride=1),
nn.PixelShuffle(2),
nn.Conv2d(G, args.n_colors, kSize, padding=(kSize-1)//2, stride=1)
])
else:
raise ValueError("scale must be 2 or 3 or 4.")
def forward(self, x):
f__1 = self.SFENet1(x)
x = self.SFENet2(f__1)
RDBs_out = []
for i in range(self.D):
x = self.RDBs[i](x)
RDBs_out.append(x)
x = self.GFF(torch.cat(RDBs_out,1))
x += f__1
if self.args.no_upsampling:
return x
else:
return self.UPNet(x)
@register('rdn')
def make_rdn(G0=64, RDNkSize=3, RDNconfig='B',
scale=2, no_upsampling=False):
| args = Namespace()
args.G0 = G0
args.RDNkSize = RDNkSize
args.RDNconfig = RDNconfig
args.scale = [scale]
args.no_upsampling = no_upsampling
args.n_colors = 3
return RDN(args) |
|
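Note that this port swaps the 2-D convolutions for `nn.Conv3d` in the feature-extraction path, but the `UPNet` branch still uses `nn.Conv2d` and `nn.PixelShuffle`, which expect 4-D input; a 5-D volume therefore only runs end-to-end with `no_upsampling=True`. A minimal smoke-test sketch, mirroring the `make_rdn` arguments above:

```python
# Illustrative smoke test for the 3-D RDN above (assumes the RDN class from
# this file is importable). With no_upsampling=True the model returns a
# G0-channel feature volume at the input resolution.
from argparse import Namespace
import torch

args = Namespace(G0=64, RDNkSize=3, RDNconfig='B', scale=[2],
                 no_upsampling=True, n_colors=3)
model = RDN(args)
x = torch.randn(1, 3, 8, 8, 8)  # (batch, channels, depth, height, width)
feats = model(x)
print(feats.shape)  # torch.Size([1, 64, 8, 8, 8])
```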
aead.rs | // Copyright 2015-2016 Brian Smith.
//
// Permission to use, copy, modify, and/or distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHORS DISCLAIM ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
// SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
// OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
// CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
//! Authenticated Encryption with Associated Data (AEAD).
//!
//! See [Authenticated encryption: relations among notions and analysis of the
//! generic composition paradigm][AEAD] for an introduction to the concept of
//! AEADs.
//!
//! [AEAD]: http://www-cse.ucsd.edu/~mihir/papers/oem.html
//! [`crypto.cipher.AEAD`]: https://golang.org/pkg/crypto/cipher/#AEAD
use self::block::{Block, BLOCK_LEN};
use crate::{
constant_time, cpu, error,
polyfill::{self, convert::*},
};
pub use self::{
aes_gcm::{AES_128_GCM, AES_256_GCM},
chacha20_poly1305::CHACHA20_POLY1305,
nonce::{Nonce, NONCE_LEN},
};
/// A key for authenticating and decrypting (“opening”) AEAD-protected data.
pub struct OpeningKey {
key: Key,
}
impl OpeningKey {
/// Create a new opening key.
///
/// `key_bytes` must be exactly `algorithm.key_len` bytes long.
#[inline]
pub fn new(
algorithm: &'static Algorithm, key_bytes: &[u8],
) -> Result<OpeningKey, error::Unspecified> {
Ok(OpeningKey {
key: Key::new(algorithm, key_bytes)?,
})
}
/// The key's AEAD algorithm.
#[inline(always)]
pub fn algorithm(&self) -> &'static Algorithm { self.key.algorithm() }
}
/// Authenticates and decrypts (“opens”) data in place.
///
/// The input may have a prefix that is `in_prefix_len` bytes long; any such
/// prefix is ignored on input and overwritten on output. The last
/// `key.algorithm().tag_len()` bytes of `ciphertext_and_tag_modified_in_place`
/// must be the tag. The part of `ciphertext_and_tag_modified_in_place` between
/// the prefix and the tag is the input ciphertext.
///
/// When `open_in_place()` returns `Ok(plaintext)`, the decrypted output is
/// `plaintext`, which is
/// `&mut ciphertext_and_tag_modified_in_place[..plaintext.len()]`. That is,
/// the output plaintext overwrites some or all of the prefix and ciphertext.
/// To put it another way, the ciphertext is shifted forward `in_prefix_len`
/// bytes and then decrypted in place. To have the output overwrite the input
/// without shifting, pass 0 as `in_prefix_len`.
///
/// When `open_in_place()` returns `Err(..)`,
/// `ciphertext_and_tag_modified_in_place` may have been overwritten in an
/// unspecified way.
///
/// The shifting feature is useful in the case where multiple packets are
/// being reassembled in place. Consider this example where the peer has sent
/// the message “Split stream reassembled in place” split into three sealed
/// packets:
///
/// ```ascii-art
/// Packet 1 Packet 2 Packet 3
/// Input: [Header][Ciphertext][Tag][Header][Ciphertext][Tag][Header][Ciphertext][Tag]
/// | +--------------+ |
/// +------+ +-----+ +----------------------------------+
/// v v v
/// Output: [Plaintext][Plaintext][Plaintext]
/// “Split stream reassembled in place”
/// ```
///
/// Let's say the header is always 5 bytes (like TLS 1.2) and the tag is always
/// 16 bytes (as for AES-GCM and ChaCha20-Poly1305). Then for this example,
/// `in_prefix_len` would be `5` for the first packet, `(5 + 16) + 5` for the
/// second packet, and `(2 * (5 + 16)) + 5` for the third packet.
///
/// (The input/output buffer is expressed as combination of `in_prefix_len`
/// and `ciphertext_and_tag_modified_in_place` because Rust's type system
/// does not allow us to have two slices, one mutable and one immutable, that
/// reference overlapping memory.) | pub fn open_in_place<'a>(
key: &OpeningKey, nonce: Nonce, aad: Aad, in_prefix_len: usize,
ciphertext_and_tag_modified_in_place: &'a mut [u8],
) -> Result<&'a mut [u8], error::Unspecified> {
let ciphertext_and_tag_len = ciphertext_and_tag_modified_in_place
.len()
.checked_sub(in_prefix_len)
.ok_or(error::Unspecified)?;
let ciphertext_len = ciphertext_and_tag_len
.checked_sub(TAG_LEN)
.ok_or(error::Unspecified)?;
check_per_nonce_max_bytes(key.key.algorithm, ciphertext_len)?;
let (in_out, received_tag) =
ciphertext_and_tag_modified_in_place.split_at_mut(in_prefix_len + ciphertext_len);
let Tag(calculated_tag) =
(key.key.algorithm.open)(&key.key.inner, nonce, aad, in_prefix_len, in_out);
if constant_time::verify_slices_are_equal(calculated_tag.as_ref(), received_tag).is_err() {
// Zero out the plaintext so that it isn't accidentally leaked or used
// after verification fails. It would be safest if we could check the
// tag before decrypting, but some `open` implementations interleave
// authentication with decryption for performance.
for b in &mut in_out[..ciphertext_len] {
*b = 0;
}
return Err(error::Unspecified);
}
// `ciphertext_len` is also the plaintext length.
Ok(&mut in_out[..ciphertext_len])
}
/// A key for encrypting and signing (“sealing”) data.
pub struct SealingKey {
key: Key,
}
impl SealingKey {
/// Constructs a new sealing key from `key_bytes`.
#[inline]
pub fn new(
algorithm: &'static Algorithm, key_bytes: &[u8],
) -> Result<SealingKey, error::Unspecified> {
Ok(SealingKey {
key: Key::new(algorithm, key_bytes)?,
})
}
/// The key's AEAD algorithm.
#[inline(always)]
pub fn algorithm(&self) -> &'static Algorithm { self.key.algorithm() }
}
/// Encrypts and signs (“seals”) data in place.
///
/// `nonce` must be unique for every use of the key to seal data.
///
/// The input is `in_out[..(in_out.len() - out_suffix_capacity)]`; i.e. the
/// input is the part of `in_out` that precedes the suffix. When
/// `seal_in_place()` returns `Ok(out_len)`, the encrypted and signed output is
/// `in_out[..out_len]`; i.e. the output has been written over input and at
/// least part of the data reserved for the suffix. (The input/output buffer
/// is expressed this way because Rust's type system does not allow us to have
/// two slices, one mutable and one immutable, that reference overlapping
/// memory at the same time.)
///
/// `out_suffix_capacity` must be at least `key.algorithm().tag_len()`. See
/// also `MAX_TAG_LEN`.
///
/// `aad` is the additional authenticated data, if any.
pub fn seal_in_place(
key: &SealingKey, nonce: Nonce, aad: Aad, in_out: &mut [u8], out_suffix_capacity: usize,
) -> Result<usize, error::Unspecified> {
if out_suffix_capacity < key.key.algorithm.tag_len() {
return Err(error::Unspecified);
}
let in_out_len = in_out
.len()
.checked_sub(out_suffix_capacity)
.ok_or(error::Unspecified)?;
check_per_nonce_max_bytes(key.key.algorithm, in_out_len)?;
let (in_out, tag_out) = in_out.split_at_mut(in_out_len);
let tag_out: &mut [u8; TAG_LEN] = tag_out.try_into_()?;
let Tag(tag) = (key.key.algorithm.seal)(&key.key.inner, nonce, aad, in_out);
tag_out.copy_from_slice(tag.as_ref());
Ok(in_out_len + TAG_LEN)
}
/// The additionally authenticated data (AAD) for an opening or sealing
/// operation. This data is authenticated but is **not** encrypted.
#[repr(transparent)]
pub struct Aad<'a>(&'a [u8]);
impl<'a> Aad<'a> {
/// Construct the `Aad` by borrowing a contiguous sequence of bytes.
#[inline]
pub fn from(aad: &'a [u8]) -> Self { Aad(aad) }
}
impl Aad<'static> {
/// Construct an empty `Aad`.
pub fn empty() -> Self { Self::from(&[]) }
}
/// `OpeningKey` and `SealingKey` are type-safety wrappers around `Key`, which
/// does all the actual work via the C AEAD interface.
struct Key {
inner: KeyInner,
algorithm: &'static Algorithm,
}
#[allow(variant_size_differences)]
enum KeyInner {
AesGcm(aes_gcm::Key),
ChaCha20Poly1305(chacha20_poly1305::Key),
}
impl Key {
fn new(algorithm: &'static Algorithm, key_bytes: &[u8]) -> Result<Self, error::Unspecified> {
cpu::cache_detected_features();
Ok(Key {
inner: (algorithm.init)(key_bytes)?,
algorithm,
})
}
/// The key's AEAD algorithm.
#[inline(always)]
fn algorithm(&self) -> &'static Algorithm { self.algorithm }
}
/// An AEAD Algorithm.
pub struct Algorithm {
init: fn(key: &[u8]) -> Result<KeyInner, error::Unspecified>,
seal: fn(key: &KeyInner, nonce: Nonce, aad: Aad, in_out: &mut [u8]) -> Tag,
open:
fn(key: &KeyInner, nonce: Nonce, aad: Aad, in_prefix_len: usize, in_out: &mut [u8]) -> Tag,
key_len: usize,
id: AlgorithmID,
/// Use `max_input_len!()` to initialize this.
// TODO: Make this `usize`.
max_input_len: u64,
}
const fn max_input_len(block_len: usize, overhead_blocks_per_nonce: usize) -> u64 {
// Each of our AEADs uses a 32-bit block counter, so the maximum is the
// largest input that will not overflow the counter.
((1u64 << 32) - polyfill::u64_from_usize(overhead_blocks_per_nonce))
* polyfill::u64_from_usize(block_len)
}
impl Algorithm {
/// The length of the key.
#[inline(always)]
pub fn key_len(&self) -> usize { self.key_len }
/// The length of a tag.
///
/// See also `MAX_TAG_LEN`.
#[inline(always)]
pub fn tag_len(&self) -> usize { TAG_LEN }
/// The length of the nonces.
#[inline(always)]
pub fn nonce_len(&self) -> usize { NONCE_LEN }
}
derive_debug_via_id!(Algorithm);
#[derive(Debug, Eq, PartialEq)]
enum AlgorithmID {
AES_128_GCM,
AES_256_GCM,
CHACHA20_POLY1305,
}
impl PartialEq for Algorithm {
fn eq(&self, other: &Self) -> bool { self.id == other.id }
}
impl Eq for Algorithm {}
/// An authentication tag.
#[must_use]
#[repr(C)]
struct Tag(Block);
// All the AEADs we support use 128-bit tags.
const TAG_LEN: usize = BLOCK_LEN;
/// The maximum length of a tag for the algorithms in this module.
pub const MAX_TAG_LEN: usize = TAG_LEN;
fn check_per_nonce_max_bytes(alg: &Algorithm, in_out_len: usize) -> Result<(), error::Unspecified> {
if polyfill::u64_from_usize(in_out_len) > alg.max_input_len {
return Err(error::Unspecified);
}
Ok(())
}
#[derive(Clone, Copy)]
enum Direction {
Opening { in_prefix_len: usize },
Sealing,
}
mod aes;
mod aes_gcm;
mod block;
mod chacha;
mod chacha20_poly1305;
pub mod chacha20_poly1305_openssh;
mod gcm;
mod nonce;
mod poly1305;
pub mod quic;
mod shift; | |
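As a quick check on the worked example in the `open_in_place()` docs above: the prefix length for the k-th sealed packet (0-indexed) is k complete `(header + tag)` overheads plus the packet's own header. A short sketch in Python:

```python
# Reproduces the in_prefix_len arithmetic from the open_in_place() docs:
# 5-byte header (TLS 1.2 style), 16-byte tag (AES-GCM / ChaCha20-Poly1305).
HEADER_LEN, TAG_LEN = 5, 16

def in_prefix_len(k):
    # k complete (header + tag) overheads, plus this packet's header
    return k * (HEADER_LEN + TAG_LEN) + HEADER_LEN

assert [in_prefix_len(k) for k in range(3)] == [5, 26, 47]
```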
PolygonPShapeOOP2.pyde | """
PolygonPShapeOOP.
Wrapping a PShape inside a custom class
and demonstrating how we can have multiple objects, each
using the same PShape.
"""
from polygon import Polygon
# A list of objects
polygons = []
def | ():
size(640, 360, P2D)
smooth()
# Make a PShape.
star = createShape()
star.beginShape()
star.noStroke()
star.fill(0, 127)
star.vertex(0, -50)
star.vertex(14, -20)
star.vertex(47, -15)
star.vertex(23, 7)
star.vertex(29, 40)
star.vertex(0, 25)
star.vertex(-29, 40)
star.vertex(-23, 7)
star.vertex(-47, -15)
star.vertex(-14, -20)
star.endShape(CLOSE)
# Pass in reference to the PShape.
# We could make polygons with different PShapes.
for i in range(25):
polygons.append(Polygon(star))
def draw():
background(255)
# Display and move them all.
for poly in polygons:
poly.display()
poly.move()
| setup |
comparator.rs | //! All keys in leveldb are compared by their binary value unless
//! defined otherwise.
//!
//! Comparators allow overriding this comparison.
//! The ordering of keys introduced by the comparator influences iteration order.
//! Databases written with one Comparator cannot be opened with another.
use leveldb_sys::*;
use libc::{size_t, c_void, c_char};
use std::slice;
use std::cmp::Ordering;
/// A comparator has two important functions:
///
/// * the name function returns a fixed name to detect errors when
/// opening databases with a different name
/// * The comparison implementation
pub trait Comparator {
/// Return the name of the Comparator
fn name(&self) -> *const c_char;
/// compare two keys. This must implement a total ordering.
fn compare(&self, a: &[u8], b: &[u8]) -> Ordering {
a.cmp(b)
}
/// whether the comparator is the `DefaultComparator`
fn null() -> bool {
false
}
}
/// DefaultComparator is a stand-in for "no comparator set".
#[derive(Copy,Clone)]
pub struct DefaultComparator;
unsafe trait InternalComparator: Comparator where Self: Sized {
extern "C" fn name(state: *mut c_void) -> *const c_char {
let x = unsafe { &*(state as *mut Self) };
x.name()
}
extern "C" fn compare(
state: *mut c_void,
a: *const c_char,
a_len: size_t,
b: *const c_char,
b_len: size_t
) -> i32 {
unsafe {
let a_slice = slice::from_raw_parts::<u8>(a as *const u8, a_len as usize);
let b_slice = slice::from_raw_parts::<u8>(b as *const u8, b_len as usize);
let x = &*(state as *mut Self);
match x.compare(&a_slice, &b_slice) {
Ordering::Less => -1,
Ordering::Equal => 0,
Ordering::Greater => 1,
}
}
}
extern "C" fn | (state: *mut c_void) {
let _x: Box<Self> = unsafe { Box::from_raw(state as *mut Self) };
// let the Box fall out of scope and run the T's destructor
}
}
unsafe impl<C: Comparator> InternalComparator for C {}
#[allow(missing_docs)]
pub fn create_comparator<T: Comparator>(x: Box<T>) -> *mut leveldb_comparator_t {
unsafe {
leveldb_comparator_create(Box::into_raw(x) as *mut c_void,
<T as InternalComparator>::destructor,
<T as InternalComparator>::compare,
<T as InternalComparator>::name)
}
}
impl Comparator for DefaultComparator {
fn name(&self) -> *const c_char {
"default_comparator".as_ptr() as *const c_char
}
fn null() -> bool {
true
}
}
| destructor |
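The `InternalComparator` trait above is a trampoline: the `extern "C"` functions recover the boxed Rust value from the `state` pointer and forward to the trait methods. A loose Python/ctypes analogue of that callback-wrapping pattern (illustrative only, not the leveldb API):

```python
# Hypothetical ctypes analogue of the trampoline pattern above: a
# C-compatible callback closes over a Python comparison function, the way
# the extern "C" fns forward to the Comparator trait.
import ctypes

CMP = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_char_p, ctypes.c_size_t,
                       ctypes.c_char_p, ctypes.c_size_t)

def make_compare(py_cmp):
    def trampoline(a, a_len, b, b_len):
        return py_cmp(a[:a_len], b[:b_len])
    return CMP(trampoline)

compare = make_compare(lambda a, b: (a > b) - (a < b))
print(compare(b"abc", 3, b"abd", 3))  # -1
```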
download_grpc.pb.go | // Copyright 2020-2021 Buf Technologies, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
// - protoc-gen-go-grpc v1.1.0
// - protoc (unknown)
// source: buf/alpha/registry/v1alpha1/download.proto
package registryv1alpha1
import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
)
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
// Requires gRPC-Go v1.32.0 or later.
const _ = grpc.SupportPackageIsVersion7
// DownloadServiceClient is the client API for DownloadService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type DownloadServiceClient interface {
// Download downloads.
Download(ctx context.Context, in *DownloadRequest, opts ...grpc.CallOption) (*DownloadResponse, error)
}
type downloadServiceClient struct {
cc grpc.ClientConnInterface
}
func NewDownloadServiceClient(cc grpc.ClientConnInterface) DownloadServiceClient {
return &downloadServiceClient{cc}
}
func (c *downloadServiceClient) Download(ctx context.Context, in *DownloadRequest, opts ...grpc.CallOption) (*DownloadResponse, error) {
out := new(DownloadResponse)
err := c.cc.Invoke(ctx, "/buf.alpha.registry.v1alpha1.DownloadService/Download", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// DownloadServiceServer is the server API for DownloadService service.
// All implementations should embed UnimplementedDownloadServiceServer
// for forward compatibility
type DownloadServiceServer interface {
// Download downloads.
Download(context.Context, *DownloadRequest) (*DownloadResponse, error)
}
// UnimplementedDownloadServiceServer should be embedded to have forward compatible implementations.
type UnimplementedDownloadServiceServer struct {
}
func (UnimplementedDownloadServiceServer) Download(context.Context, *DownloadRequest) (*DownloadResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method Download not implemented")
}
// UnsafeDownloadServiceServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to DownloadServiceServer will
// result in compilation errors.
type UnsafeDownloadServiceServer interface {
mustEmbedUnimplementedDownloadServiceServer()
}
func RegisterDownloadServiceServer(s grpc.ServiceRegistrar, srv DownloadServiceServer) |
func _DownloadService_Download_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(DownloadRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(DownloadServiceServer).Download(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/buf.alpha.registry.v1alpha1.DownloadService/Download",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(DownloadServiceServer).Download(ctx, req.(*DownloadRequest))
}
return interceptor(ctx, in, info, handler)
}
// DownloadService_ServiceDesc is the grpc.ServiceDesc for DownloadService service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
var DownloadService_ServiceDesc = grpc.ServiceDesc{
ServiceName: "buf.alpha.registry.v1alpha1.DownloadService",
HandlerType: (*DownloadServiceServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "Download",
Handler: _DownloadService_Download_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "buf/alpha/registry/v1alpha1/download.proto",
}
| {
s.RegisterService(&DownloadService_ServiceDesc, srv)
} |
mod.rs | /* Copyright (c) 2018-2021 Jeremy Davis ([email protected])
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software
* and associated documentation files (the "Software"), to deal in the Software without restriction,
* including without limitation the rights to use, copy, modify, merge, publish, distribute,
* sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or
* substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
* NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | //! This module comprises the kernel's virtual memory manager, abstracting both the physical memory
//! and the platform-specific details needed to map and swap pages of virtual memory.
pub mod paging; | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
|
bytepool_test.go | // Copyright 2015-2018 trivago N.V.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package tcontainer
import (
"bytes"
"encoding/binary"
"github.com/trivago/tgo/ttesting"
"math/rand"
"sync"
"testing"
"time"
)
func TestBytePool(t *testing.T) {
expect := ttesting.NewExpect(t)
pool := NewBytePool()
tinyMin := pool.Get(1)
expect.Equal(tiny, cap(tinyMin))
expect.Equal(1, len(tinyMin))
tinyMax := pool.Get(tiny)
expect.Equal(tiny, len(tinyMax))
smallMin := pool.Get(tiny + 1)
expect.Equal(small, cap(smallMin))
expect.Equal(tiny+1, len(smallMin))
smallMax := pool.Get(small)
expect.Equal(small, len(smallMax))
mediumMin := pool.Get(small + 1)
expect.Equal(medium, cap(mediumMin))
expect.Equal(small+1, len(mediumMin))
mediumMax := pool.Get(medium)
expect.Equal(medium, len(mediumMax))
largeMin := pool.Get(medium + 1)
expect.Equal(large, cap(largeMin))
expect.Equal(medium+1, len(largeMin))
largeMax := pool.Get(large)
expect.Equal(large, len(largeMax))
hugeMin := pool.Get(large + 1)
expect.Equal(huge, cap(hugeMin))
expect.Equal(large+1, len(hugeMin))
hugeMax := pool.Get(huge)
expect.Equal(huge, len(hugeMax))
}
func TestBytePoolParallel(t *testing.T) {
expect := ttesting.NewExpect(t)
pool := NewBytePool()
start := new(sync.WaitGroup)
done := make(chan int)
allocate := func() {
start.Wait()
for {
select {
case <-done:
return
default:
pool.Get(rand.Intn(huge))
}
}
}
start.Add(1)
for i := 0; i < 100; i++ {
go expect.NoPanic(allocate)
}
start.Done()
time.Sleep(2 * time.Second)
close(done)
}
func TestBytePoolUnique(t *testing.T) | {
expect := ttesting.NewExpect(t)
pool := NewBytePool()
numTests := tinyCount * 10
chunks := make([][]byte, numTests)
for i := 0; i < numTests; i++ {
data := pool.Get(8)
binary.PutVarint(data, int64(i))
chunks[i] = data
}
for i := 0; i < numTests; i++ {
num, err := binary.ReadVarint(bytes.NewReader(chunks[i]))
expect.NoError(err)
expect.Equal(int64(i), num)
}
} |
|
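The assertions above pin down the pool's contract: `Get(n)` returns a slice of length `n` whose capacity is the smallest size class that fits. A sketch of that behaviour in Python, with made-up class sizes (the real `tiny`/`small`/`medium`/`large`/`huge` constants live elsewhere in the `tcontainer` package):

```python
# Size-class lookup sketch. CLASSES values are hypothetical; only the
# "round capacity up to the next class, keep length at n" rule matches
# the test above.
import bisect

CLASSES = [64, 512, 4096, 32768, 262144]

def get(n):
    cls = CLASSES[bisect.bisect_left(CLASSES, n)]  # smallest class >= n
    buf = bytearray(cls)                           # capacity = class size
    return memoryview(buf)[:n]                     # length = requested n

chunk = get(65)
assert len(chunk) == 65 and len(chunk.obj) == 512
```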
origin.py | import os
import sys
from abc import ABC, abstractmethod
from collections import namedtuple
from dagster import check
from dagster.core.definitions.reconstructable import ReconstructableRepository
from dagster.core.types.loadable_target_origin import LoadableTargetOrigin
from dagster.serdes import create_snapshot_id, whitelist_for_serdes
# This is a hard-coded name for the special "in-process" location.
# This is typically only used for tests, although we may allow
# users to load user code into a host process as well. We want
# to encourage the user code to be in user processes as much
# as possible, since that is how this system will be used in prod.
# We used a hard-coded name so that we don't have to create
# made up names for this case.
IN_PROCESS_NAME = "<<in_process>>"
def _assign_grpc_location_name(port, socket, host):
check.opt_int_param(port, "port")
check.opt_str_param(socket, "socket")
check.str_param(host, "host")
check.invariant(port or socket)
return "grpc:{host}:{socket_or_port}".format(
host=host, socket_or_port=(socket if socket else port)
)
def _assign_loadable_target_origin_name(loadable_target_origin):
check.inst_param(loadable_target_origin, "loadable_target_origin", LoadableTargetOrigin)
file_or_module = (
loadable_target_origin.package_name
if loadable_target_origin.package_name
else (
loadable_target_origin.module_name
if loadable_target_origin.module_name
else os.path.basename(loadable_target_origin.python_file)
)
)
return (
"{file_or_module}:{attribute}".format(
file_or_module=file_or_module, attribute=loadable_target_origin.attribute
)
if loadable_target_origin.attribute
else file_or_module
)
class RepositoryLocationOrigin(ABC):
"""Serializable representation of a RepositoryLocation that can be used to
uniquely identify the location or reload it across process boundaries.
"""
@property
def is_reload_supported(self):
return True
@abstractmethod
def get_cli_args(self):
pass
@abstractmethod
def get_display_metadata(self):
pass
def get_id(self):
return create_snapshot_id(self)
@whitelist_for_serdes
class InProcessRepositoryLocationOrigin(
namedtuple("_InProcessRepositoryLocationOrigin", "recon_repo"), RepositoryLocationOrigin,
):
"""Identifies a repository location constructed in the host process. Should only be
used in tests.
"""
def __new__(cls, recon_repo):
return super(InProcessRepositoryLocationOrigin, cls).__new__(
cls, check.inst_param(recon_repo, "recon_repo", ReconstructableRepository)
)
@property
def location_name(self):
return IN_PROCESS_NAME
def get_cli_args(self):
check.invariant(False, "Cannot get CLI args for an in process repository location")
@property
def is_reload_supported(self):
return False
def get_display_metadata(self):
|
@whitelist_for_serdes
class ManagedGrpcPythonEnvRepositoryLocationOrigin(
namedtuple(
"_ManagedGrpcPythonEnvRepositoryLocationOrigin", "loadable_target_origin location_name"
),
RepositoryLocationOrigin,
):
"""Identifies a repository location in a Python environment. Dagster creates a gRPC server
for these repository locations on startup.
"""
def __new__(cls, loadable_target_origin, location_name=None):
return super(ManagedGrpcPythonEnvRepositoryLocationOrigin, cls).__new__(
cls,
check.inst_param(
loadable_target_origin, "loadable_target_origin", LoadableTargetOrigin
),
check.str_param(location_name, "location_name")
if location_name
else _assign_loadable_target_origin_name(loadable_target_origin),
)
def get_cli_args(self):
return " ".join(self.loadable_target_origin.get_cli_args())
def get_display_metadata(self):
metadata = {
"python_file": self.loadable_target_origin.python_file,
"module_name": self.loadable_target_origin.module_name,
"working_directory": self.loadable_target_origin.working_directory,
"attribute": self.loadable_target_origin.attribute,
"package_name": self.loadable_target_origin.package_name,
"executable_path": (
self.loadable_target_origin.executable_path
if self.loadable_target_origin.executable_path != sys.executable
else None
),
}
return {key: value for key, value in metadata.items() if value is not None}
@whitelist_for_serdes
class GrpcServerRepositoryLocationOrigin(
namedtuple("_GrpcServerRepositoryLocationOrigin", "host port socket location_name"),
RepositoryLocationOrigin,
):
"""Identifies a repository location hosted in a gRPC server managed by the user. Dagster
is not responsible for managing the lifecycle of the server.
"""
def __new__(cls, host, port=None, socket=None, location_name=None):
return super(GrpcServerRepositoryLocationOrigin, cls).__new__(
cls,
check.str_param(host, "host"),
check.opt_int_param(port, "port"),
check.opt_str_param(socket, "socket"),
check.str_param(location_name, "location_name")
if location_name
else _assign_grpc_location_name(port, socket, host),
)
def get_cli_args(self):
if self.port:
return "--grpc-host {host} --grpc-port {port}".format(host=self.host, port=self.port)
else:
return "--grpc-host {host} --grpc-socket {socket}".format(
host=self.host, socket=self.socket
)
def get_display_metadata(self):
metadata = {"host": self.host, "port": self.port, "socket": self.socket}
return {key: value for key, value in metadata.items() if value is not None}
@whitelist_for_serdes
class ExternalRepositoryOrigin(
namedtuple("_ExternalRepositoryOrigin", "repository_location_origin repository_name")
):
"""Serializable representation of an ExternalRepository that can be used to
uniquely identify it or reload it across process boundaries.
"""
def __new__(cls, repository_location_origin, repository_name):
return super(ExternalRepositoryOrigin, cls).__new__(
cls,
check.inst_param(
repository_location_origin, "repository_location_origin", RepositoryLocationOrigin
),
check.str_param(repository_name, "repository_name"),
)
def get_id(self):
return create_snapshot_id(self)
def get_pipeline_origin(self, pipeline_name):
return ExternalPipelineOrigin(self, pipeline_name)
def get_job_origin(self, job_name):
return ExternalJobOrigin(self, job_name)
def get_partition_set_origin(self, partition_set_name):
return ExternalPartitionSetOrigin(self, partition_set_name)
def get_cli_args(self):
return self.repository_location_origin.get_cli_args() + " -r " + self.repository_name
@whitelist_for_serdes
class ExternalPipelineOrigin(
namedtuple("_ExternalPipelineOrigin", "external_repository_origin pipeline_name")
):
"""Serializable representation of an ExternalPipeline that can be used to
uniquely identify it or reload it across process boundaries.
"""
def __new__(cls, external_repository_origin, pipeline_name):
return super(ExternalPipelineOrigin, cls).__new__(
cls,
check.inst_param(
external_repository_origin, "external_repository_origin", ExternalRepositoryOrigin,
),
check.str_param(pipeline_name, "pipeline_name"),
)
def get_repo_cli_args(self):
return self.external_repository_origin.get_cli_args()
def get_id(self):
return create_snapshot_id(self)
@whitelist_for_serdes
class ExternalJobOrigin(namedtuple("_ExternalJobOrigin", "external_repository_origin job_name")):
"""Serializable representation of an ExternalJob that can be used to
uniquely identify it or reload it across process boundaries.
"""
def __new__(cls, external_repository_origin, job_name):
return super(ExternalJobOrigin, cls).__new__(
cls,
check.inst_param(
external_repository_origin, "external_repository_origin", ExternalRepositoryOrigin,
),
check.str_param(job_name, "job_name"),
)
def get_repo_cli_args(self):
return self.external_repository_origin.get_cli_args()
def get_id(self):
return create_snapshot_id(self)
@whitelist_for_serdes
class ExternalPartitionSetOrigin(
namedtuple("_PartitionSetOrigin", "external_repository_origin partition_set_name")
):
"""Serializable representation of an ExternalPartitionSet that can be used to
uniquely identify it or reload it across process boundaries.
"""
def __new__(cls, external_repository_origin, partition_set_name):
return super(ExternalPartitionSetOrigin, cls).__new__(
cls,
check.inst_param(
external_repository_origin, "external_repository_origin", ExternalRepositoryOrigin,
),
check.str_param(partition_set_name, "partition_set_name"),
)
def get_repo_cli_args(self):
return self.external_repository_origin.get_cli_args()
def get_id(self):
return create_snapshot_id(self)
| return {
"in_process_code_pointer": self.recon_repo.pointer.describe(),
} |
admin.py |
from django.contrib import admin
from django.contrib.auth.models import Group
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
# Register your models here.
from .models.person import Person
from .models.user import User
from .models.hierarchy_type import HierarchyType
from .models.hierarchy import Hierarchy
from .models.menu import Menu
from .models.user_hierarchy_group import UserHierarchyGroup
from .models.user_hierarchy_permission import UserHierarchyPermission
from .models.person import Religion, Ethnicity, Occupation, EducationLevel
from .models.person import PensionScheme
from .models.person_address import PersonAddressType, PersonAddress
from .models.person_document import DocumentType, PersonDocument
from .models.person_phone import PersonPhoneType, PersonPhone
admin.site.register(ContentType)
class PermissionAdmin(admin.ModelAdmin):
list_display = ("codename", "name", "content_type")
search_fields = ("codename", "name", "content_type__app_label")
admin.site.register(Permission, PermissionAdmin)
'''
admin.site.register(Hierarchy)
admin.site.register(HierarchyType)
admin.site.register(UserHierarchyGroup)
admin.site.register(UserHierarchyPermission)
admin.site.register(Menu)
'''
admin.site.register(Person)
admin.site.register(Religion)
admin.site.register(Ethnicity)
admin.site.register(Occupation)
admin.site.register(EducationLevel)
admin.site.register(PensionScheme)
admin.site.register(PersonAddressType)
admin.site.register(PersonAddress)
admin.site.register(DocumentType)
admin.site.register(PersonDocument)
admin.site.register(PersonPhoneType)
admin.site.register(PersonPhone)
# forms
from django.contrib.auth.forms import UserCreationForm, UserChangeForm
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth import get_user_model
CHOICES = (('ON', 'ON'),
('OFF', 'OFF'),
)
class MyUserCreationForm(UserCreationForm):
class Meta(UserCreationForm.Meta):
model = User # get_user_model()
class MyUserChangeForm(UserChangeForm):
description = forms.CharField(
label=_('Description'), required=False, initial='edit',
widget=forms.Textarea)
# is_staff = forms.ChoiceField(widget=forms.RadioSelect, choices=CHOICES)
class | (UserChangeForm.Meta):
model = User # get_user_model()
class MyUserAdmin(UserAdmin):
""" """
fieldsets = (
(None, {'fields': ('username', 'password')}),
(_('Personal info'),
{'fields': ('email',)}),
(_('Permissions'), {'fields': ('is_active', 'description', 'is_staff',
'is_superuser', 'groups',
'user_permissions')}),
(_('Important dates'), {'fields': ('last_login', 'date_joined')}),
)
form = MyUserChangeForm
add_form = MyUserCreationForm
list_display = ('username', 'email',
'first_name', 'last_name', 'is_staff', ) # 'status'
list_filter = ('is_staff', 'is_superuser',
'is_active', 'groups', 'date_joined')
#date_hierarchy = 'date_joined'
def status(self, obj):
return obj.status
status.admin_order_field = 'status'
status.short_description = 'status'
# raw_id_fields = ('person',)
'''
def save_model(self, request, obj, form, change):
if obj.pk:
if obj.is_active:
if UserStatus.objects.filter(user=obj.pk).count() > 0:
if UserStatus.objects.filter(user=obj.pk).latest('id').status != ON:
UserStatus.objects.create(
status=ON,
description=form.cleaned_data['description'], user=obj)
else: # no tiene registros en UserStatus
UserStatus.objects.create(
status=ON,
description=form.cleaned_data['description'], user=obj)
else:
if UserStatus.objects.filter(user=obj.pk).count() > 0:
if UserStatus.objects.filter(user=obj.pk).latest('id').status != OFF:
UserStatus.objects.create(
status=OFF,
description=form.cleaned_data['description'], user=obj)
else:
UserStatus.objects.create(
status=OFF,
description=form.cleaned_data['description'], user=obj)
obj.save()
'''
def get_queryset(self, request):
qs = super(MyUserAdmin, self).get_queryset(request)
# qr = qs.with_status() # add 'status' column
# print qr
return qs
'''
def formfield_for_choice_field(self, db_field, request, **kwargs):
if db_field.name == 'status':
kwargs['choices'] = (
(ON, 'Accepted'),
(OFF, 'Denied'),
(True, 'Denied'),
(False, 'Denied'),
(None, 'Denied'),
(0, 'Denied'),
(1, 'Denied'),
('0', 'Denied'),
('1', 'Denied'),
('True', 'Denied'),
('False', 'Denied'),
)
# db_field['status'].choices = (
# (ON, 'Accepted'),
# (OFF, 'Denied'),
# )
return super(MyUserAdmin, self).formfield_for_choice_field(db_field,
request, **kwargs)
'''
admin.site.register(User, MyUserAdmin)
| Meta |
build.rs | // Copyright 2021 Contributors to the Confidential Packaging project.
// SPDX-License-Identifier: MIT
//! Builds a confidential package from a compiled application binary and
//! a given set of inputs to control signing and encryption.
use crate::error::{Error, Result, ToolErrorKind};
use crate::prototype::builder::simple_build_from_payload;
use crate::util::get_config_from_command_or_env;
use cpk::keys::file::FileKeySource;
use cpk::keys::http::WebContractKeySource;
use cpk::keys::local::LocalMemoryKeyPair;
use cpk::keys::parsec::ParsecKeyPair;
use cpk::keys::EncryptionKeySource;
use structopt::StructOpt;
/// Models the options required by the build command.
#[derive(Debug, StructOpt)]
pub struct Build {
/// The primary input file, which contains the content that needs to be packaged
/// by the build process.
#[structopt(short = "i", long = "input-payload")]
input_payload: String,
/// The output file, which will contain the built package once the process
/// completes.
#[structopt(short = "o", long = "output-package")]
output_package: String,
/// The identity of the application whose class key will be used for encryption. This would
/// typically be a UUID in string form, although it can be any suitable identifier provided that
/// a class encryption key with this name is contained in the referenced key store.
#[structopt(short = "a", long = "application-id")]
application_id: String,
/// The encryption key method to use, either "http" or "file".
#[structopt(short = "e", long = "encryption-key-method")]
encryption_key_method: String,
/// The human-readable name of the application.
#[structopt(short = "n", long = "application-name", default_value = "Unknown")]
application_name: String,
/// The vendor or service provider identifier string.
#[structopt(short = "v", long = "vendor", default_value = "Unknown")]
vendor: String,
/// The address of the key store. This argument is optional, and its interpretation depends
/// on the encryption key method.
///
/// When the encryption key method is `http`, this argument should be the URL
/// of an API endpoint that implements the Confidential Package Key Wrapping protocol. If it is
/// omitted from the command-line, then this URL will be read from the
/// `CP_CLOUD_KEY_SOURCE` environment variable instead. If it is not specified there either, then the
/// command will fail.
///
/// When the encryption key method is `file`, this argument should be the path to an existing
/// file on disk, containing the key data in JSON format. File-based key stores are intended for
/// local test environments only. If this argument is omitted from the command-line, it will
/// be resolved from the `CP_FILE_KEY_SOURCE` environment variable instead. If it is not specified
/// there either, then the command will fail.
#[structopt(short = "k", long = "key-store")]
key_store: Option<String>,
/// The wrapping key method to use: either "local" or "parsec".
#[structopt(short = "w", long = "wrapping-key-method")]
wrapping_key_method: String,
}
impl Build {
/// Builds the package with the given encryption key source.
fn build_with_encryption_key_source<S : EncryptionKeySource>(&self, eks: &S) -> Result<()> {
match self.wrapping_key_method.as_str() {
"local" => {
println!("WARNING: Local memory RSA wrapping keys should only be used in dev/test environments.");
println!(" Consider using Parsec to manage the wrapping key using the best-available security facilities of your platform.");
let local_wrapping = LocalMemoryKeyPair::default();
simple_build_from_payload(
&self.application_id,
&self.application_name,
&self.vendor,
&self.input_payload,
&self.output_package,
eks,
&local_wrapping,
)
},
"parsec" => {
let parsec_wrapping = ParsecKeyPair::default();
simple_build_from_payload(
&self.application_id,
&self.application_name,
&self.vendor,
&self.input_payload,
&self.output_package,
eks,
&parsec_wrapping,
)
},
_ => Err(Error::ToolError(ToolErrorKind::InvalidWrappingKeySource)),
}
}
/// Builds the confidential package from the given inputs.
pub fn run(&self) -> Result<()> |
}
| {
match self.encryption_key_method.as_str() {
"http" => {
let endpoint = get_config_from_command_or_env(
&self.key_store,
"CP_CLOUD_KEY_SOURCE",
"HTTP key store endpoint",
)?;
let wks = WebContractKeySource::from_endpoint_uri(&endpoint);
self.build_with_encryption_key_source(&wks)
},
"file" => {
let path = get_config_from_command_or_env(
&self.key_store,
"CP_FILE_KEY_SOURCE",
"key store file path",
)?;
let fks = FileKeySource::from_file_path(&path)?;
self.build_with_encryption_key_source(&fks)
},
_ => Err(Error::ToolError(ToolErrorKind::InvalidEncryptionKeySource)),
}
} |
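The `--key-store` docs above describe a two-level fallback (command-line value, then an environment variable, then failure), implemented by `get_config_from_command_or_env`. A sketch of that rule, in Python for illustration:

```python
# Hypothetical re-statement of the command-line-or-environment fallback
# described in the docs above; names are illustrative, not the tool's API.
import os

def config_from_arg_or_env(arg, env_var, what):
    if arg is not None:
        return arg
    val = os.environ.get(env_var)
    if val is None:
        raise SystemExit("no %s given on the command line or in %s"
                         % (what, env_var))
    return val

# e.g. config_from_arg_or_env(None, "CP_CLOUD_KEY_SOURCE",
#                             "HTTP key store endpoint")
```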
memory_test.go | package session_test
import (
"fmt"
"testing"
"time"
"github.com/infraboard/keyauth/client/session"
"github.com/infraboard/keyauth/pkg/token"
)
func TestMemStore(t *testing.T) | {
s := session.NewMemoryStore()
tk := &token.Token{
AccessToken: "abc",
}
s.SetToken(tk)
go func() {
tk := s.LeaseToken("abc")
s.ReturnToken(tk)
}()
go func() {
tk := s.LeaseToken("abc")
s.ReturnToken(tk)
}()
time.Sleep(2 * time.Second)
s.ReturnToken(tk)
fmt.Println(s.GetToken("abc"))
} |
|
simplekvtest.go | package main
import (
"bytes"
"context"
"fmt"
"time"
"github.com/oasisprotocol/oasis-core/go/common/cbor"
"github.com/oasisprotocol/oasis-core/go/common/logging"
sdk "github.com/oasisprotocol/oasis-sdk/client-sdk/go"
"github.com/oasisprotocol/oasis-sdk/client-sdk/go/client"
"github.com/oasisprotocol/oasis-sdk/client-sdk/go/crypto/signature"
"github.com/oasisprotocol/oasis-sdk/client-sdk/go/testing"
"github.com/oasisprotocol/oasis-sdk/client-sdk/go/types"
)
// EventWaitTimeout specifies how long to wait for an event.
const EventWaitTimeout = 20 * time.Second
// The kvKey type must match the Key type from the simple-keyvalue runtime
// in ../runtimes/simple-keyvalue/src/keyvalue/types.rs.
type kvKey struct {
Key []byte `json:"key"`
}
// The kvKeyValue type must match the KeyValue type from the simple-keyvalue
// runtime in ../runtimes/simple-keyvalue/src/keyvalue/types.rs.
type kvKeyValue struct {
Key []byte `json:"key"`
Value []byte `json:"value"`
}
// The kvInsertEvent type must match the Event::Insert type from the
// simple-keyvalue runtime in ../runtimes/simple-keyvalue/src/keyvalue.rs.
type kvInsertEvent struct {
KV kvKeyValue `json:"kv"`
}
var kvInsertEventKey = sdk.NewEventKey("keyvalue", 1)
// The kvRemoveEvent type must match the Event::Remove type from the
// simple-keyvalue runtime in ../runtimes/simple-keyvalue/src/keyvalue.rs.
type kvRemoveEvent struct {
Key kvKey `json:"key"`
}
var kvRemoveEventKey = sdk.NewEventKey("keyvalue", 2)
// GetChainContext returns the chain context.
func GetChainContext(ctx context.Context, rtc client.RuntimeClient) (signature.Context, error) {
info, err := rtc.GetInfo(ctx)
if err != nil {
return "", err
}
return info.ChainContext, nil
}
// kvInsert inserts given key-value pair into storage.
func kvInsert(rtc client.RuntimeClient, signer signature.Signer, nonce uint64, key []byte, value []byte) error {
ctx := context.Background()
chainCtx, err := GetChainContext(ctx, rtc)
if err != nil {
return err
}
tx := types.NewTransaction(nil, "keyvalue.Insert", kvKeyValue{
Key: key,
Value: value,
})
tx.AppendSignerInfo(signer.Public(), nonce)
stx := tx.PrepareForSigning()
stx.AppendSign(chainCtx, signer)
if _, err = rtc.SubmitTx(ctx, stx.UnverifiedTransaction()); err != nil {
return err
}
return nil
}
// kvRemove removes given key from storage.
func kvRemove(rtc client.RuntimeClient, signer signature.Signer, nonce uint64, key []byte) error {
ctx := context.Background()
chainCtx, err := GetChainContext(ctx, rtc)
if err != nil {
return err
}
tx := types.NewTransaction(nil, "keyvalue.Remove", kvKey{
Key: key,
})
tx.AppendSignerInfo(signer.Public(), nonce)
stx := tx.PrepareForSigning()
stx.AppendSign(chainCtx, signer)
if _, err := rtc.SubmitTx(ctx, stx.UnverifiedTransaction()); err != nil {
return err
}
return nil
}
// kvGet gets given key's value from storage.
func kvGet(rtc client.RuntimeClient, key []byte) ([]byte, error) {
ctx := context.Background()
var resp kvKeyValue
if err := rtc.Query(ctx, client.RoundLatest, "keyvalue.Get", kvKey{Key: key}, &resp); err != nil {
return nil, err
}
return resp.Value, nil
}
func SimpleKVTest(log *logging.Logger, rtc client.RuntimeClient) error {
signer := testing.Alice.Signer
testKey := []byte("test_key")
testValue := []byte("test_value")
log.Info("inserting test key")
if err := kvInsert(rtc, signer, 0, testKey, testValue); err != nil {
return err
}
log.Info("fetching test key")
val, err := kvGet(rtc, testKey)
if err != nil {
return err
}
if !bytes.Equal(val, testValue) {
return fmt.Errorf("fetched value does not match inserted value")
}
log.Info("removing test key")
if err := kvRemove(rtc, signer, 1, testKey); err != nil {
return err
}
log.Info("fetching removed key should fail")
_, err = kvGet(rtc, testKey)
if err == nil {
return fmt.Errorf("fetching removed key should fail")
}
return nil
}
func KVEventTest(log *logging.Logger, rtc client.RuntimeClient) error {
signer := testing.Alice.Signer
testKey := []byte("event_test_key")
testValue := []byte("event_test_value")
// Subscribe to blocks.
ctx := context.Background()
blkCh, blkSub, err := rtc.WatchBlocks(ctx)
if err != nil {
return err
}
defer blkSub.Close()
log.Info("inserting test key")
if err := kvInsert(rtc, signer, 0, testKey, testValue); err != nil {
return err
}
log.Info("waiting for insert event")
var gotEvent bool
WaitInsertLoop:
for {
select {
case <-ctx.Done():
return fmt.Errorf("context terminated")
case <-time.After(EventWaitTimeout):
return fmt.Errorf("timed out")
case blk, ok := <-blkCh:
if !ok {
return fmt.Errorf("failed to get block from channel")
}
events, err := rtc.GetEvents(ctx, blk.Block.Header.Round)
if err != nil {
log.Error("failed to get events",
"err", err,
"round", blk.Block.Header.Round,
)
return err
}
for _, ev := range events {
switch {
case kvInsertEventKey.IsEqual(ev.Key):
var ie kvInsertEvent
if err = cbor.Unmarshal(ev.Value, &ie); err != nil {
log.Error("failed to unmarshal insert event",
"err", err,
)
continue
}
if bytes.Equal(ie.KV.Key, testKey) && bytes.Equal(ie.KV.Value, testValue) {
gotEvent = true
log.Info("got our insert event")
break WaitInsertLoop
}
default:
}
}
}
}
if !gotEvent {
return fmt.Errorf("didn't get insert event")
}
log.Info("removing test key")
if err := kvRemove(rtc, signer, 1, testKey); err != nil {
return err
}
log.Info("waiting for remove event")
gotEvent = false
WaitRemoveLoop:
for {
select {
case <-ctx.Done():
return fmt.Errorf("context terminated")
case <-time.After(EventWaitTimeout):
return fmt.Errorf("timed out")
case blk, ok := <-blkCh:
if !ok {
return fmt.Errorf("failed to get block from channel")
}
events, err := rtc.GetEvents(ctx, blk.Block.Header.Round)
if err != nil |
for _, ev := range events {
switch {
case kvRemoveEventKey.IsEqual(ev.Key):
var re kvRemoveEvent
if err = cbor.Unmarshal(ev.Value, &re); err != nil {
log.Error("failed to unmarshal remove event",
"err", err,
)
continue
}
if bytes.Equal(re.Key.Key, testKey) {
gotEvent = true
log.Info("got our remove event")
break WaitRemoveLoop
}
default:
}
}
}
}
if !gotEvent {
return fmt.Errorf("didn't get remove event")
}
return nil
}
| {
log.Error("failed to get events",
"err", err,
"round", blk.Block.Header.Round,
)
return err
} |
array_create_node.rs | use crate::ast::{node::ArrayCreateNode, NodeResult};
use crate::{crocoi::CrocoiNode, error::CrocoError};
use {
crate::crocoi::{symbol::get_symbol_type, symbol::Array, ICodegen, INodeResult, ISymbol},
std::cell::RefCell,
std::rc::Rc,
};
impl CrocoiNode for ArrayCreateNode {
fn | (&mut self, codegen: &mut ICodegen) -> Result<INodeResult, CrocoError> {
// don't allow empty array declarations
// people should use
// let a [num] and not let a [num] = []
if self.contents.is_empty() {
return Err(CrocoError::empty_array_error(&self.code_pos));
}
// visit all array elements
let mut visited = Vec::with_capacity(self.contents.len());
for el in &mut self.contents {
visited.push(el.crocoi(codegen)?.into_symbol(&self.code_pos)?);
}
// infer the array type from the first element
let array_type = get_symbol_type(&visited[0]);
// make sure all elements are of the same type and wrap them in Rcs
let mut visited_rc = Vec::with_capacity(self.contents.len());
for el in visited.into_iter() {
let el_type = get_symbol_type(&el);
if el_type != array_type {
return Err(CrocoError::mixed_type_array(&self.code_pos));
}
visited_rc.push(Rc::new(RefCell::new(el)))
}
let array = Array {
contents: visited_rc,
array_type: Box::new(array_type),
};
Ok(NodeResult::Value(ISymbol::Array(array)))
}
}
| crocoi |
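The node enforces two rules visible above: empty array literals are rejected (`let a [num] = []` is disallowed) and the element type inferred from the first element must match every other element. The same checks sketched in Python:

```python
# Python sketch of the array-literal checks above: reject empty literals,
# infer the element type from the first item, reject mixed-type arrays.
def check_array(items):
    if not items:
        raise ValueError("empty array literals are not allowed")
    first = type(items[0])
    if any(type(el) is not first for el in items[1:]):
        raise ValueError("array elements must all share one type")
    return first

print(check_array([1, 2, 3]))   # <class 'int'>
# check_array([1, "a"])         # raises ValueError
```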
setup.py | #!/usr/bin/python -u
#
# Setup script for libxml2 and libxslt if found
#
import sys, os
from distutils.core import setup, Extension
# Below ROOT, we expect to find include, include/libxml2, lib and bin.
# On *nix, it is not needed (but should not harm),
# on Windows, it is set by configure.js.
ROOT = r'/Users/emsommers/Documents/GitHub/futureDocs/vendor/bundle/ruby/2.3.0/gems/nokogiri-1.10.10/ports/x86_64-apple-darwin17/libxml2/2.9.10'
# Thread-enabled libxml2
with_threads = 1
# If this flag is set (windows only),
# a private copy of the dlls are included in the package.
# If this flag is not set, the libxml2 and libxslt
# dlls must be found somewhere in the PATH at runtime.
WITHDLLS = 1 and sys.platform.startswith('win')
def missing(file):
if os.access(file, os.R_OK) == 0:
return 1
return 0
try:
HOME = os.environ['HOME']
except:
HOME="C:"
if WITHDLLS:
# libxml dlls (expected in ROOT/bin)
dlls = [ 'iconv.dll','libxml2.dll','libxslt.dll','libexslt.dll' ]
dlls = [os.path.join(ROOT,'bin',dll) for dll in dlls]
# create __init__.py for the libxmlmods package
if not os.path.exists("libxmlmods"):
os.mkdir("libxmlmods")
open("libxmlmods/__init__.py","w").close()
def altImport(s):
|
if sys.platform.startswith('win'):
libraryPrefix = 'lib'
platformLibs = []
else:
libraryPrefix = ''
platformLibs = ["m","z"]
# those are examined to find
# - libxml2/libxml/tree.h
# - iconv.h
# - libxslt/xsltconfig.h
includes_dir = [
"/usr/include",
"/usr/local/include",
"/opt/include",
os.path.join(ROOT,'include'),
HOME
];
xml_includes=""
for dir in includes_dir:
if not missing(dir + "/libxml2/libxml/tree.h"):
xml_includes=dir + "/libxml2"
break;
if xml_includes == "":
print("failed to find headers for libxml2: update includes_dir")
sys.exit(1)
iconv_includes=""
for dir in includes_dir:
if not missing(dir + "/iconv.h"):
iconv_includes=dir
break;
if iconv_includes == "":
print("failed to find headers for libiconv: update includes_dir")
sys.exit(1)
# those are added in the linker search path for libraries
libdirs = [
os.path.join(ROOT,'lib'),
]
xml_files = ["libxml2-api.xml", "libxml2-python-api.xml",
"libxml.c", "libxml.py", "libxml_wrap.h", "types.c",
"xmlgenerator.py", "README", "TODO", "drv_libxml2.py"]
xslt_files = ["libxslt-api.xml", "libxslt-python-api.xml",
"libxslt.c", "libxsl.py", "libxslt_wrap.h",
"xsltgenerator.py"]
if missing("libxml2-py.c") or missing("libxml2.py"):
try:
try:
import xmlgenerator
except:
import generator
except:
print("failed to find and generate stubs for libxml2, aborting ...")
print(sys.exc_info()[0], sys.exc_info()[1])
sys.exit(1)
head = open("libxml.py", "r")
generated = open("libxml2class.py", "r")
result = open("libxml2.py", "w")
for line in head.readlines():
if WITHDLLS:
result.write(altImport(line))
else:
result.write(line)
for line in generated.readlines():
result.write(line)
head.close()
generated.close()
result.close()
with_xslt=0
if missing("libxslt-py.c") or missing("libxslt.py"):
if missing("xsltgenerator.py") or missing("libxslt-api.xml"):
print("libxslt stub generator not found, libxslt not built")
else:
try:
import xsltgenerator
except:
print("failed to generate stubs for libxslt, aborting ...")
print(sys.exc_info()[0], sys.exc_info()[1])
else:
head = open("libxsl.py", "r")
generated = open("libxsltclass.py", "r")
result = open("libxslt.py", "w")
for line in head.readlines():
if WITHDLLS:
result.write(altImport(line))
else:
result.write(line)
for line in generated.readlines():
result.write(line)
head.close()
generated.close()
result.close()
with_xslt=1
else:
with_xslt=1
if with_xslt == 1:
xslt_includes=""
for dir in includes_dir:
if not missing(dir + "/libxslt/xsltconfig.h"):
xslt_includes=dir + "/libxslt"
break;
if xslt_includes == "":
print("failed to find headers for libxslt: update includes_dir")
with_xslt = 0
descr = "libxml2 package"
modules = [ 'libxml2', 'drv_libxml2' ]
if WITHDLLS:
modules.append('libxmlmods.__init__')
c_files = ['libxml2-py.c', 'libxml.c', 'types.c' ]
includes= [xml_includes, iconv_includes]
libs = [libraryPrefix + "xml2"] + platformLibs
macros = []
if with_threads:
macros.append(('_REENTRANT','1'))
if with_xslt == 1:
descr = "libxml2 and libxslt package"
if not sys.platform.startswith('win'):
#
# We are gonna build 2 identical shared libs with merge initializing
# both libxml2mod and libxsltmod
#
c_files = c_files + ['libxslt-py.c', 'libxslt.c']
xslt_c_files = c_files
macros.append(('MERGED_MODULES', '1'))
else:
#
# On windows the MERGED_MODULE option is not needed
# (and does not work)
#
xslt_c_files = ['libxslt-py.c', 'libxslt.c', 'types.c']
libs.insert(0, libraryPrefix + 'exslt')
libs.insert(0, libraryPrefix + 'xslt')
includes.append(xslt_includes)
modules.append('libxslt')
extens=[Extension('libxml2mod', c_files, include_dirs=includes,
library_dirs=libdirs,
libraries=libs, define_macros=macros)]
if with_xslt == 1:
extens.append(Extension('libxsltmod', xslt_c_files, include_dirs=includes,
library_dirs=libdirs,
libraries=libs, define_macros=macros))
if missing("MANIFEST"):
manifest = open("MANIFEST", "w")
manifest.write("setup.py\n")
for file in xml_files:
manifest.write(file + "\n")
if with_xslt == 1:
for file in xslt_files:
manifest.write(file + "\n")
manifest.close()
if WITHDLLS:
ext_package = "libxmlmods"
if sys.version >= "2.2":
base = "lib/site-packages/"
else:
base = ""
data_files = [(base+"libxmlmods",dlls)]
else:
ext_package = None
data_files = []
setup (name = "libxml2-python",
# On *nix, the version number is created from setup.py.in
# On windows, it is set by configure.js
version = "2.9.10",
description = descr,
author = "Daniel Veillard",
author_email = "[email protected]",
url = "http://xmlsoft.org/python.html",
       license="MIT License",
py_modules=modules,
ext_modules=extens,
ext_package=ext_package,
data_files=data_files,
)
sys.exit(0)
| s = s.replace("import libxml2mod","from libxmlmods import libxml2mod")
s = s.replace("import libxsltmod","from libxmlmods import libxsltmod")
return s |
fibonacci.py | def solution(number): # O(N)
"""
Write a function to compute the fibonacci sequence value to the requested iteration.
>>> solution(3)
2
>>> solution(10)
55
>>> solution(20)
6765
"""
m = {
0: 0,
1: 1
} # O(1)
def run_sequence(n): # O(N)
|
return run_sequence(number) # O(N)
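# Note (explanatory, not part of the original): the memo dict `m` bounds the recursion
# at O(N) distinct calls; without it the naive two-branch recursion would be exponential.
# An iterative two-variable loop would compute the same value in O(1) extra space.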
if __name__ == '__main__':
import doctest
doctest.testmod()
| if not isinstance(m.get(n), int): # O(1)
m[n] = run_sequence(n - 1) + run_sequence(n - 2) # O(N)
return m[n] # O(1) |
invvect_test.go | // Copyright (c) 2013-2016 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package wire
import (
"bytes"
"reflect"
"testing"
"github.com/Messer4/btcd/chaincfg/chainhash"
"github.com/davecgh/go-spew/spew"
)
// TestInvVectStringer tests the stringized output for inventory vector types.
func | (t *testing.T) {
tests := []struct {
in InvType
want string
}{
{InvTypeError, "ERROR"},
{InvTypeTx, "MSG_TX"},
{InvTypeBlock, "MSG_BLOCK"},
{0xffffffff, "Unknown InvType (4294967295)"},
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
result := test.in.String()
if result != test.want {
t.Errorf("String #%d\n got: %s want: %s", i, result,
test.want)
continue
}
}
}
// TestInvVect tests the InvVect API.
func TestInvVect(t *testing.T) {
ivType := InvTypeBlock
hash := chainhash.Hash{}
// Ensure we get the same payload and signature back out.
iv := NewInvVect(ivType, &hash)
if iv.Type != ivType {
t.Errorf("NewInvVect: wrong type - got %v, want %v",
iv.Type, ivType)
}
if !iv.Hash.IsEqual(&hash) {
t.Errorf("NewInvVect: wrong hash - got %v, want %v",
spew.Sdump(iv.Hash), spew.Sdump(hash))
}
}
// TestInvVectWire tests the InvVect wire encode and decode for various
// protocol versions and supported inventory vector types.
func TestInvVectWire(t *testing.T) {
// Block 203707 hash.
hashStr := "3264bc2ac36a60840790ba1d475d01367e7c723da941069e9dc"
baseHash, err := chainhash.NewHashFromStr(hashStr)
if err != nil {
t.Errorf("NewHashFromStr: %v", err)
}
// errInvVect is an inventory vector with an error.
errInvVect := InvVect{
Type: InvTypeError,
Hash: chainhash.Hash{},
}
// errInvVectEncoded is the wire encoded bytes of errInvVect.
errInvVectEncoded := []byte{
0x00, 0x00, 0x00, 0x00, // InvTypeError
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // No hash
}
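	// Layout note: every encoded inventory vector is a 4-byte little-endian
	// type field followed by the 32-byte hash, 36 bytes in total, which the
	// byte slices in this test spell out explicitly.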
// txInvVect is an inventory vector representing a transaction.
txInvVect := InvVect{
Type: InvTypeTx,
Hash: *baseHash,
}
// txInvVectEncoded is the wire encoded bytes of txInvVect.
txInvVectEncoded := []byte{
0x01, 0x00, 0x00, 0x00, // InvTypeTx
0xdc, 0xe9, 0x69, 0x10, 0x94, 0xda, 0x23, 0xc7,
0xe7, 0x67, 0x13, 0xd0, 0x75, 0xd4, 0xa1, 0x0b,
0x79, 0x40, 0x08, 0xa6, 0x36, 0xac, 0xc2, 0x4b,
0x26, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Block 203707 hash
}
// blockInvVect is an inventory vector representing a block.
blockInvVect := InvVect{
Type: InvTypeBlock,
Hash: *baseHash,
}
// blockInvVectEncoded is the wire encoded bytes of blockInvVect.
blockInvVectEncoded := []byte{
0x02, 0x00, 0x00, 0x00, // InvTypeBlock
0xdc, 0xe9, 0x69, 0x10, 0x94, 0xda, 0x23, 0xc7,
0xe7, 0x67, 0x13, 0xd0, 0x75, 0xd4, 0xa1, 0x0b,
0x79, 0x40, 0x08, 0xa6, 0x36, 0xac, 0xc2, 0x4b,
0x26, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Block 203707 hash
}
tests := []struct {
		in   InvVect // InvVect to encode
		out  InvVect // Expected decoded InvVect
buf []byte // Wire encoding
pver uint32 // Protocol version for wire encoding
}{
// Latest protocol version error inventory vector.
{
errInvVect,
errInvVect,
errInvVectEncoded,
ProtocolVersion,
},
// Latest protocol version tx inventory vector.
{
txInvVect,
txInvVect,
txInvVectEncoded,
ProtocolVersion,
},
// Latest protocol version block inventory vector.
{
blockInvVect,
blockInvVect,
blockInvVectEncoded,
ProtocolVersion,
},
// Protocol version BIP0035Version error inventory vector.
{
errInvVect,
errInvVect,
errInvVectEncoded,
BIP0035Version,
},
// Protocol version BIP0035Version tx inventory vector.
{
txInvVect,
txInvVect,
txInvVectEncoded,
BIP0035Version,
},
// Protocol version BIP0035Version block inventory vector.
{
blockInvVect,
blockInvVect,
blockInvVectEncoded,
BIP0035Version,
},
// Protocol version BIP0031Version error inventory vector.
{
errInvVect,
errInvVect,
errInvVectEncoded,
BIP0031Version,
},
// Protocol version BIP0031Version tx inventory vector.
{
txInvVect,
txInvVect,
txInvVectEncoded,
BIP0031Version,
},
// Protocol version BIP0031Version block inventory vector.
{
blockInvVect,
blockInvVect,
blockInvVectEncoded,
BIP0031Version,
},
// Protocol version NetAddressTimeVersion error inventory vector.
{
errInvVect,
errInvVect,
errInvVectEncoded,
NetAddressTimeVersion,
},
// Protocol version NetAddressTimeVersion tx inventory vector.
{
txInvVect,
txInvVect,
txInvVectEncoded,
NetAddressTimeVersion,
},
// Protocol version NetAddressTimeVersion block inventory vector.
{
blockInvVect,
blockInvVect,
blockInvVectEncoded,
NetAddressTimeVersion,
},
// Protocol version MultipleAddressVersion error inventory vector.
{
errInvVect,
errInvVect,
errInvVectEncoded,
MultipleAddressVersion,
},
// Protocol version MultipleAddressVersion tx inventory vector.
{
txInvVect,
txInvVect,
txInvVectEncoded,
MultipleAddressVersion,
},
// Protocol version MultipleAddressVersion block inventory vector.
{
blockInvVect,
blockInvVect,
blockInvVectEncoded,
MultipleAddressVersion,
},
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
// Encode to wire format.
var buf bytes.Buffer
err := writeInvVect(&buf, test.pver, &test.in)
if err != nil {
t.Errorf("writeInvVect #%d error %v", i, err)
continue
}
if !bytes.Equal(buf.Bytes(), test.buf) {
t.Errorf("writeInvVect #%d\n got: %s want: %s", i,
spew.Sdump(buf.Bytes()), spew.Sdump(test.buf))
continue
}
// Decode the message from wire format.
var iv InvVect
rbuf := bytes.NewReader(test.buf)
err = readInvVect(rbuf, test.pver, &iv)
if err != nil {
t.Errorf("readInvVect #%d error %v", i, err)
continue
}
if !reflect.DeepEqual(iv, test.out) {
t.Errorf("readInvVect #%d\n got: %s want: %s", i,
spew.Sdump(iv), spew.Sdump(test.out))
continue
}
}
}
| TestInvTypeStringer |
test_architectures.py | # Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import unittest
import numpy as np
import paddle.fluid as fluid
from ppdet.modeling.tests.decorator_helper import prog_scope
from ppdet.core.workspace import load_config, merge_config, create
from ppdet.modeling.model_input import create_feed
class TestFasterRCNN(unittest.TestCase):
def setUp(self):
self.set_config()
self.cfg = load_config(self.cfg_file)
self.detector_type = self.cfg['architecture']
def set_config(self):
self.cfg_file = 'configs/faster_rcnn_r50_1x.yml'
@prog_scope()
def test_train(self):
model = create(self.detector_type)
inputs_def = self.cfg['TrainReader']['inputs_def']
inputs_def['image_shape'] = [3, None, None]
feed_vars, _ = model.build_inputs(**inputs_def)
train_fetches = model.train(feed_vars)
@prog_scope()
def test_test(self):
inputs_def = self.cfg['EvalReader']['inputs_def']
inputs_def['image_shape'] = [3, None, None]
model = create(self.detector_type)
feed_vars, _ = model.build_inputs(**inputs_def)
test_fetches = model.eval(feed_vars)
class TestMaskRCNN(TestFasterRCNN):
def set_config(self):
self.cfg_file = 'configs/mask_rcnn_r50_1x.yml'
class TestCascadeRCNN(TestFasterRCNN):
def set_config(self):
self.cfg_file = 'configs/cascade_rcnn_r50_fpn_1x.yml'
class TestYolov3(TestFasterRCNN):
def set_config(self):
self.cfg_file = 'configs/yolov3_darknet.yml'
class TestRetinaNet(TestFasterRCNN):
def set_config(self):
self.cfg_file = 'configs/retinanet_r50_fpn_1x.yml'
class | (TestFasterRCNN):
def set_config(self):
self.cfg_file = 'configs/ssd/ssd_mobilenet_v1_voc.yml'
if __name__ == '__main__':
unittest.main()
| TestSSD |
sortProperties.ts | import sortBy from 'lodash-es/sortBy';
import toPairs from 'lodash-es/toPairs';
import fromPairs from 'lodash-es/fromPairs';
/* Sort root.properties by each property's $order */
function sortProper | s: object): object {
const propertiesArr: Array<[string, any]> = (Object.entries || toPairs)(properties);
const sortPropertiesArr: Array<[string, any]> = sortBy(propertiesArr, function(o: [string, any]): number {
return o[1].$order ?? 0;
});
// @ts-ignore
return (Object.fromEntries || fromPairs)(sortPropertiesArr);
}
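/* Usage sketch (hypothetical input; assumes each property object carries a numeric
   `$order`, with missing values falling back to 0 via the `?? 0` above):
   sortProperties({ b: { $order: 2 }, a: { $order: 1 } })
   // => { a: { $order: 1 }, b: { $order: 2 } } */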
export default sortProperties; | ties(propertie |
customizations_test.go | package route53_test
import (
"strings"
"testing"
"github.com/ClearcodeHQ/aws-sdk-go/aws"
"github.com/ClearcodeHQ/aws-sdk-go/awstesting/unit"
"github.com/ClearcodeHQ/aws-sdk-go/service/route53"
)
func | (t *testing.T) {
svc := route53.New(unit.Session)
svc.Handlers.Validate.Clear()
req, _ := svc.GetHostedZoneRequest(&route53.GetHostedZoneInput{
Id: aws.String("/hostedzone/ABCDEFG"),
})
expectPath := strings.Replace(req.Operation.HTTPPath, "{Id}", "ABCDEFG", -1)
req.HTTPRequest.URL.RawQuery = "abc=123"
req.Build()
if a, e := req.HTTPRequest.URL.Path, expectPath; a != e {
t.Errorf("expect path %q, got %q", e, a)
}
if a, e := req.HTTPRequest.URL.RawPath, expectPath; a != e {
t.Errorf("expect raw path %q, got %q", e, a)
}
if a, e := req.HTTPRequest.URL.RawQuery, "abc=123"; a != e {
t.Errorf("expect query to be %q, got %q", e, a)
}
}
| TestBuildCorrectURI |
encoder.rs | use crate::encoding::ceil8;
use crate::read::levels::get_bit_width;
use super::super::bitpacking;
use super::super::uleb128;
use super::super::zigzag_leb128;
/// Encodes an iterator of `i32` according to parquet's `DELTA_BINARY_PACKED`.
/// # Implementation
/// * This function does not allocate on the heap.
/// * The number of mini-blocks is always 1. This may change in the future.
pub fn | <I: Iterator<Item = i32>>(mut iterator: I, buffer: &mut Vec<u8>) {
let block_size = 128;
let mini_blocks = 1;
let mut container = [0u8; 10];
let encoded_len = uleb128::encode(block_size, &mut container);
buffer.extend_from_slice(&container[..encoded_len]);
let encoded_len = uleb128::encode(mini_blocks, &mut container);
buffer.extend_from_slice(&container[..encoded_len]);
let length = iterator.size_hint().1.unwrap();
let encoded_len = uleb128::encode(length as u64, &mut container);
buffer.extend_from_slice(&container[..encoded_len]);
let mut values = [0i64; 128];
let mut deltas = [0u32; 128];
let first_value = iterator.next().unwrap().into();
let (container, encoded_len) = zigzag_leb128::encode(first_value);
buffer.extend_from_slice(&container[..encoded_len]);
let mut prev = first_value;
let mut length = iterator.size_hint().1.unwrap();
while length != 0 {
for (i, v) in (0..128).zip(&mut iterator) {
let v: i64 = v.into();
values[i] = v - prev;
prev = v;
}
let consumed = std::cmp::min(length - iterator.size_hint().1.unwrap(), 128);
let values = &values[..consumed];
let min_delta = *values.iter().min().unwrap();
let max_delta = *values.iter().max().unwrap();
values.iter().zip(deltas.iter_mut()).for_each(|(v, d)| {
*d = (v - min_delta) as u32;
});
// <min delta> <list of bitwidths of miniblocks> <miniblocks>
let (container, encoded_len) = zigzag_leb128::encode(min_delta);
buffer.extend_from_slice(&container[..encoded_len]);
let num_bits = get_bit_width((max_delta - min_delta) as i16) as u8;
buffer.push(num_bits);
if num_bits > 0 {
let start = buffer.len();
// bitpack encode all (deltas.len = 128 which is a multiple of 32)
let bytes_needed = start + ceil8(deltas.len() * num_bits as usize);
buffer.resize(bytes_needed, 0);
bitpacking::encode(deltas.as_ref(), num_bits, &mut buffer[start..]);
let bytes_needed = start + ceil8(deltas.len() * num_bits as usize);
buffer.truncate(bytes_needed);
}
length = iterator.size_hint().1.unwrap();
}
}
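// Worked example (informal; assumes the standard zigzag mapping used by parquet):
//   zigzag(n) = (n << 1) ^ (n >> 63), so 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, ...
//   uleb128 emits 7 payload bits per byte, least-significant group first, with the
//   continuation bit set on all but the last byte; hence 128 -> [0x80, 0x01], which
//   is why the block size of 128 shows up as the bytes [128, 1] in the tests below.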
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn constant_delta() {
// header: [128, 1, 1, 5, 2]:
// block size: 128 <=u> 128, 1
// mini-blocks: 1 <=u> 1
// elements: 5 <=u> 5
        // first_value: 1 <=z> 2
        // block1: [2, 0]
// min_delta: 1 <=z> 2
// bitwidth: 0
let data = 1i32..=5;
let expected = vec![128u8, 1, 1, 5, 2, 2, 0];
let mut buffer = vec![];
encode(data, &mut buffer);
assert_eq!(expected, buffer);
}
#[test]
fn negative_min_delta() {
// max - min = 1 - -4 = 5
let data = vec![1i32, 2, 3, 4, 5, 1];
        // header: [128, 1, 1, 6, 2]
        // block size: 128 <=u> 128, 1
        // mini-blocks: 1 <=u> 1
        // elements: 6 <=u> 6
        // first_value: 1 <=z> 2
        // block1: [7, 3, 0b01101101, 0b00001011, 0, ...]
// min_delta: -4 <=z> 7
// bitwidth: 3
// values: [5, 5, 5, 5, 0] <=b> [
// 0b01101101
// 0b00001011
// ]
let mut expected = vec![128u8, 1, 1, 6, 2, 7, 3, 0b01101101, 0b00001011];
expected.extend(std::iter::repeat(0).take(128 * 3 / 8 - 2)); // 128 values, 3 bits, 2 already used
let mut buffer = vec![];
encode(data.into_iter(), &mut buffer);
assert_eq!(expected, buffer);
}
}
| encode |
managedcluster_import_detach.go | // Copyright Contributors to the Open Cluster Management project
package managedcluster
import (
"context"
"fmt"
"strconv"
"time"
corev1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/types"
"k8s.io/client-go/tools/clientcmd"
"k8s.io/klog"
"sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/controller-runtime/pkg/reconcile"
clusterv1 "github.com/open-cluster-management/api/cluster/v1"
libgometav1 "github.com/open-cluster-management/library-go/pkg/apis/meta/v1"
hivev1 "github.com/openshift/hive/pkg/apis/hive/v1"
clientcmdapi "k8s.io/client-go/tools/clientcmd/api"
"github.com/open-cluster-management/applier/pkg/applier"
"github.com/open-cluster-management/applier/pkg/templateprocessor"
)
func (r *ReconcileManagedCluster) importCluster(
managedCluster *clusterv1.ManagedCluster,
clusterDeployment *hivev1.ClusterDeployment,
autoImportSecret *corev1.Secret) (res reconcile.Result, err error) {
res = reconcile.Result{}
//Assuming that is a local import
client := r.client
//A clusterDeployment exist then get the client
if clusterDeployment != nil {
if !clusterDeployment.Spec.Installed {
klog.Infof("cluster %s not yet installed", clusterDeployment.Name)
return reconcile.Result{Requeue: true, RequeueAfter: 1 * time.Minute},
nil
}
klog.Infof("Use hive client to import cluster %s", managedCluster.Name)
client, err = r.getManagedClusterClientFromHive(clusterDeployment, managedCluster)
if err != nil {
return reconcile.Result{}, err
}
		//Test first to avoid an update that would trigger a reconcile roundtrip, since the clusterDeployment is watched
if !libgometav1.HasFinalizer(clusterDeployment, managedClusterFinalizer) {
klog.Info("Add finalizer in clusterDeployment")
libgometav1.AddFinalizer(clusterDeployment, managedClusterFinalizer)
err = r.client.Update(context.TODO(), clusterDeployment)
if err != nil {
return reconcile.Result{}, err
}
}
}
//Check if auto-import and get client from the importSecret
if autoImportSecret != nil {
klog.Infof("Use autoImportSecret to import cluster %s", managedCluster.Name)
client, err = r.getManagedClusterClientFromAutoImportSecret(autoImportSecret)
}
if err == nil {
res, err = r.importClusterWithClient(managedCluster, autoImportSecret, client)
}
if err != nil && autoImportSecret != nil {
errUpdate := r.updateAutoImportRetry(managedCluster, autoImportSecret)
if errUpdate != nil {
return res, errUpdate
}
}
return res, err
}
//get the client from hive clusterDeployment credentials secret
func (r *ReconcileManagedCluster) getManagedClusterClientFromHive(
clusterDeployment *hivev1.ClusterDeployment,
managedCluster *clusterv1.ManagedCluster) (client.Client, error) {
managedClusterKubeSecret := &corev1.Secret{}
err := r.client.Get(context.TODO(), types.NamespacedName{
Name: clusterDeployment.Spec.ClusterMetadata.AdminKubeconfigSecretRef.Name,
Namespace: managedCluster.Name,
},
managedClusterKubeSecret)
if err != nil {
return nil, err
}
return getClientFromKubeConfig(managedClusterKubeSecret.Data["kubeconfig"])
}
//Get the client from the auto-import-secret
func (r *ReconcileManagedCluster) getManagedClusterClientFromAutoImportSecret(
autoImportSecret *corev1.Secret) (client.Client, error) {
//generate client using kubeconfig
if k, ok := autoImportSecret.Data["kubeconfig"]; ok {
return getClientFromKubeConfig(k)
}
token, tok := autoImportSecret.Data["token"]
server, sok := autoImportSecret.Data["server"]
if tok && sok {
return getClientFromToken(string(token), string(server))
}
return nil, fmt.Errorf("kubeconfig or token and server are missing")
}
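//Illustrative auto-import secret payload (key names taken from the lookups above;
//the retry counter's exact key string is defined elsewhere as autoImportRetryName,
//and a "kubeconfig" key may be supplied instead of token/server):
//  data:
//    token:  <bearer token for the managed cluster>
//    server: https://api.managed.example.com:6443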
//Create client from kubeconfig
func getClientFromKubeConfig(kubeconfig []byte) (client.Client, error) {
config, err := clientcmd.Load(kubeconfig)
if err != nil {
return nil, err
}
rconfig, err := clientcmd.NewDefaultClientConfig(
*config,
&clientcmd.ConfigOverrides{}).ClientConfig()
if err != nil {
return nil, err
}
client, err := client.New(rconfig, client.Options{})
if err != nil {
return nil, err
}
return client, nil
}
//Create client from token and server
func getClientFromToken(token, server string) (client.Client, error) |
func (r *ReconcileManagedCluster) updateAutoImportRetry(
managedCluster *clusterv1.ManagedCluster,
autoImportSecret *corev1.Secret) error {
if autoImportSecret != nil {
//Decrement the autoImportRetry
autoImportRetry, err := strconv.Atoi(string(autoImportSecret.Data[autoImportRetryName]))
if err != nil {
return err
}
klog.Infof("Retry left to import %s: %d", managedCluster.Name, autoImportRetry)
autoImportRetry--
		//Remove if negative, as a label cannot start with "-"; it must start with a character
if autoImportRetry < 0 {
err = r.client.Delete(context.TODO(), autoImportSecret)
if err != nil {
return err
}
autoImportSecret = nil
} else {
v := []byte(strconv.Itoa(autoImportRetry))
autoImportSecret.Data[autoImportRetryName] = v
err := r.client.Update(context.TODO(), autoImportSecret)
if err != nil {
return err
}
}
}
return nil
}
//importClusterWithClient imports a cluster using the provided managed cluster client
func (r *ReconcileManagedCluster) importClusterWithClient(
managedCluster *clusterv1.ManagedCluster,
autoImportSecret *corev1.Secret,
managedClusterClient client.Client) (reconcile.Result, error) {
klog.Infof("Importing cluster: %s", managedCluster.Name)
//Do not create SA if already exists
excluded := make([]string, 0)
sa := &corev1.ServiceAccount{}
if err := managedClusterClient.Get(context.TODO(),
types.NamespacedName{
Name: "klusterlet",
Namespace: klusterletNamespace,
}, sa); err == nil {
excluded = append(excluded, "klusterlet/service_account.yaml")
}
//Generate crds and yamls
crds, yamls, err := generateImportYAMLs(r.client, managedCluster, excluded)
if err != nil {
return reconcile.Result{Requeue: true, RequeueAfter: 30 * time.Second}, err
}
//Convert crds to Yaml
bb, err := templateprocessor.ToYAMLsUnstructured(crds)
if err != nil {
return reconcile.Result{}, err
}
//create applier for crds
a, err := applier.NewApplier(
templateprocessor.NewYamlStringReader(templateprocessor.ConvertArrayOfBytesToString(bb),
templateprocessor.KubernetesYamlsDelimiter),
nil,
managedClusterClient,
nil,
nil,
applier.DefaultKubernetesMerger, nil)
if err != nil {
return reconcile.Result{}, err
}
//Create the crds resources
err = a.CreateOrUpdateInPath(".", nil, false, nil)
if err != nil {
return reconcile.Result{Requeue: true, RequeueAfter: 30 * time.Second}, err
}
//Convert yamls to yaml
bb, err = templateprocessor.ToYAMLsUnstructured(yamls)
if err != nil {
return reconcile.Result{}, err
}
//Create applier for yamls
a, err = applier.NewApplier(
templateprocessor.NewYamlStringReader(templateprocessor.ConvertArrayOfBytesToString(bb),
templateprocessor.KubernetesYamlsDelimiter),
nil,
managedClusterClient,
nil,
nil,
applier.DefaultKubernetesMerger,
nil)
if err != nil {
return reconcile.Result{}, err
}
//Create the yamls resources
err = a.CreateOrUpdateInPath(".", excluded, false, nil)
if err != nil {
return reconcile.Result{Requeue: true, RequeueAfter: 30 * time.Second}, err
}
	//Succeeded, do not retry: remove the auto-import secret
if autoImportSecret != nil {
if err := r.client.Delete(context.TODO(), autoImportSecret); err != nil {
return reconcile.Result{}, err
}
}
klog.Infof("Successfully imported %s", managedCluster.Name)
return reconcile.Result{}, nil
}
func (r *ReconcileManagedCluster) managedClusterDeletion(instance *clusterv1.ManagedCluster) (reconcile.Result, error) {
reqLogger := log.WithValues("Instance.Namespace", instance.Namespace, "Instance.Name", instance.Name)
reqLogger.Info(fmt.Sprintf("Instance in Terminating: %s", instance.Name))
if len(filterFinalizers(instance, []string{managedClusterFinalizer, registrationFinalizer})) != 0 {
return reconcile.Result{Requeue: true, RequeueAfter: 1 * time.Minute}, nil
}
offLine := checkOffLine(instance)
reqLogger.Info(fmt.Sprintf("deleteAllOtherManifestWork: %s", instance.Name))
err := deleteAllOtherManifestWork(r.client, instance)
if err != nil {
if !offLine {
return reconcile.Result{}, err
}
}
if offLine {
reqLogger.Info(fmt.Sprintf("evictAllOtherManifestWork: %s", instance.Name))
err = evictAllOtherManifestWork(r.client, instance)
if err != nil {
return reconcile.Result{}, err
}
}
reqLogger.Info(fmt.Sprintf("deleteKlusterletManifestWorks: %s", instance.Name))
err = deleteKlusterletManifestWorks(r.client, instance)
if err != nil {
return reconcile.Result{}, err
}
if !offLine {
return reconcile.Result{Requeue: true, RequeueAfter: 1 * time.Minute}, nil
}
reqLogger.Info(fmt.Sprintf("evictKlusterletManifestWorks: %s", instance.Name))
err = evictKlusterletManifestWorks(r.client, instance)
if err != nil {
return reconcile.Result{}, err
}
reqLogger.Info(fmt.Sprintf("Remove all finalizer: %s", instance.Name))
instance.ObjectMeta.Finalizers = nil
if err := r.client.Update(context.TODO(), instance); err != nil {
return reconcile.Result{}, err
}
return reconcile.Result{Requeue: true, RequeueAfter: 5 * time.Second}, nil
}
| {
//Create config
config := clientcmdapi.NewConfig()
config.Clusters["default"] = &clientcmdapi.Cluster{
Server: server,
InsecureSkipTLSVerify: true,
}
config.AuthInfos["default"] = &clientcmdapi.AuthInfo{
Token: token,
}
config.Contexts["default"] = &clientcmdapi.Context{
Cluster: "default",
AuthInfo: "default",
}
config.CurrentContext = "default"
clientConfig := clientcmd.NewDefaultClientConfig(*config, &clientcmd.ConfigOverrides{})
restConfig, err := clientConfig.ClientConfig()
if err != nil {
return nil, err
}
clientClient, err := client.New(restConfig, client.Options{})
if err != nil {
return nil, err
}
return clientClient, nil
} |
config_override.py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
'''
Standard production environment configuration | configs = {
'db':{
'host':'127.0.0.1'
}
} | '''
|
v1_endpoints_list.py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.16
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V1EndpointsList(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'api_version': 'str',
'items': 'list[V1Endpoints]',
'kind': 'str',
'metadata': 'V1ListMeta'
}
attribute_map = {
'api_version': 'apiVersion',
'items': 'items',
'kind': 'kind',
'metadata': 'metadata'
}
def __init__(self, api_version=None, items=None, kind=None, metadata=None, local_vars_configuration=None): # noqa: E501
"""V1EndpointsList - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._api_version = None
self._items = None
self._kind = None
self._metadata = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
self.items = items
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
@property
def api_version(self):
"""Gets the api_version of this V1EndpointsList. # noqa: E501
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:return: The api_version of this V1EndpointsList. # noqa: E501
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""Sets the api_version of this V1EndpointsList.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:param api_version: The api_version of this V1EndpointsList. # noqa: E501
:type: str
"""
self._api_version = api_version
@property
def items(self):
"""Gets the items of this V1EndpointsList. # noqa: E501
List of endpoints. # noqa: E501
:return: The items of this V1EndpointsList. # noqa: E501
:rtype: list[V1Endpoints]
"""
return self._items
@items.setter
def items(self, items):
"""Sets the items of this V1EndpointsList.
List of endpoints. # noqa: E501
:param items: The items of this V1EndpointsList. # noqa: E501
:type: list[V1Endpoints]
"""
if self.local_vars_configuration.client_side_validation and items is None: # noqa: E501
raise ValueError("Invalid value for `items`, must not be `None`") # noqa: E501
self._items = items
@property
def kind(self):
"""Gets the kind of this V1EndpointsList. # noqa: E501
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:return: The kind of this V1EndpointsList. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
|
@property
def metadata(self):
"""Gets the metadata of this V1EndpointsList. # noqa: E501
:return: The metadata of this V1EndpointsList. # noqa: E501
:rtype: V1ListMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""Sets the metadata of this V1EndpointsList.
:param metadata: The metadata of this V1EndpointsList. # noqa: E501
:type: V1ListMeta
"""
self._metadata = metadata
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1EndpointsList):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1EndpointsList):
return True
return self.to_dict() != other.to_dict()
| """Sets the kind of this V1EndpointsList.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:param kind: The kind of this V1EndpointsList. # noqa: E501
:type: str
"""
self._kind = kind |
testsuite_default_memHierarchy_sdl.py | # -*- coding: utf-8 -*-
from sst_unittest import *
from sst_unittest_support import *
import os.path
import threading  # explicit import for the module-init semaphore below
################################################################################
# Code to support a single instance module initialize, must be called setUp method
module_init = 0
module_sema = threading.Semaphore()
def initializeTestModule_SingleInstance(class_inst):
global module_init
global module_sema
module_sema.acquire()
if module_init != 1:
try:
# Put your single instance Init Code Here
pass
except:
pass
module_init = 1
module_sema.release()
################################################################################
################################################################################
################################################################################
class testcase_memHierarchy_sdl(SSTTestCase):
def initializeClass(self, testName):
super(type(self), self).initializeClass(testName)
# Put test based setup code here. it is called before testing starts
# NOTE: This method is called once for every test
def setUp(self):
super(type(self), self).setUp()
initializeTestModule_SingleInstance(self)
# Put test based setup code here. it is called once before every test
def tearDown(self):
# Put test based teardown code here. it is called once after every test
super(type(self), self).tearDown()
#####
def test_memHierarchy_sdl_1(self):
# sdl-1 Simple CPU + 1 level cache + Memory
self.memHierarchy_Template("sdl-1")
def test_memHierarchy_sdl_2(self):
# sdl-2 Simple CPU + 1 level cache + DRAMSim Memory
self.memHierarchy_Template("sdl-2")
def test_memHierarchy_sdl_3(self):
# sdl-3 Simple CPU + 1 level cache + DRAMSim Memory (alternate block size)
self.memHierarchy_Template("sdl-3")
def test_memHierarchy_sdl2_1(self):
# sdl2-1 Simple CPU + 2 levels cache + Memory
self.memHierarchy_Template("sdl2-1")
def test_memHierarchy_sdl3_1(self):
# sdl3-1 2 Simple CPUs + 2 levels cache + Memory
self.memHierarchy_Template("sdl3-1")
def test_memHierarchy_sdl3_2(self):
# sdl3-2 2 Simple CPUs + 2 levels cache + DRAMSim Memory
self.memHierarchy_Template("sdl3-2")
def test_memHierarchy_sdl3_3(self):
self.memHierarchy_Template("sdl3-3")
def test_memHierarchy_sdl4_1(self):
self.memHierarchy_Template("sdl4-1")
@skip_on_sstsimulator_conf_empty_str("DRAMSIM", "LIBDIR", "DRAMSIM is not included as part of this build")
def test_memHierarchy_sdl4_2_dramsim(self):
self.memHierarchy_Template("sdl4-2", ignore_err_file=True)
@skip_on_sstsimulator_conf_empty_str("RAMULATOR", "LIBDIR", "RAMULATOR is not included as part of this build")
def test_memHierarchy_sdl4_2_ramulator(self):
self.memHierarchy_Template("sdl4-2-ramulator")
@skip_on_sstsimulator_conf_empty_str("DRAMSIM", "LIBDIR", "DRAMSIM is not included as part of this build")
def test_memHierarchy_sdl5_1_dramsim(self):
self.memHierarchy_Template("sdl5-1", ignore_err_file=True)
@skip_on_sstsimulator_conf_empty_str("RAMULATOR", "LIBDIR", "RAMULATOR is not included as part of this build")
def test_memHierarchy_sdl5_1_ramulator(self):
if testing_check_get_num_ranks() > 1 or testing_check_get_num_threads() > 1:
self.memHierarchy_Template("sdl5-1-ramulator_MC")
else:
self.memHierarchy_Template("sdl5-1-ramulator")
def test_memHierarchy_sdl8_1(self):
self.memHierarchy_Template("sdl8-1")
def test_memHierarchy_sdl8_3(self):
self.memHierarchy_Template("sdl8-3")
def test_memHierarchy_sdl8_4(self):
self.memHierarchy_Template("sdl8-4")
def test_memHierarchy_sdl9_1(self):
self.memHierarchy_Template("sdl9-1")
def test_memHierarchy_sdl9_2(self):
|
#####
def memHierarchy_Template(self, testcase, ignore_err_file=False):
# Get the path to the test files
test_path = self.get_testsuite_dir()
outdir = self.get_test_output_run_dir()
tmpdir = self.get_test_output_tmp_dir()
        # Some tweaking of file names is needed due to inconsistencies in testcase names
testcasename_sdl = testcase.replace("_MC", "")
testcasename_out = testcase.replace("-", "_")
# Set the various file paths
testDataFileName=("test_memHierarchy_{0}".format(testcasename_out))
sdlfile = "{0}/{1}.py".format(test_path, testcasename_sdl)
reffile = "{0}/refFiles/{1}.out".format(test_path, testDataFileName)
outfile = "{0}/{1}.out".format(outdir, testDataFileName)
errfile = "{0}/{1}.err".format(outdir, testDataFileName)
mpioutfiles = "{0}/{1}.testfile".format(outdir, testDataFileName)
log_debug("testcase = {0}".format(testcase))
log_debug("sdl file = {0}".format(sdlfile))
log_debug("ref file = {0}".format(reffile))
# Run SST in the tests directory
self.run_sst(sdlfile, outfile, errfile, set_cwd=test_path, mpi_out_files=mpioutfiles)
# Lines to ignore
# These are generated by DRAMSim
ignore_lines = ["===== MemorySystem"]
ignore_lines.append("TOTAL_STORAGE : 2048MB | 1 Ranks | 16 Devices per rank")
ignore_lines.append("== Loading")
ignore_lines.append("DRAMSim2 Clock Frequency =1Hz, CPU Clock Frequency=1Hz")
ignore_lines.append("WARNING: UNKNOWN KEY 'DEBUG_TRANS_FLOW' IN INI FILE")
# This is generated by SST when the number of ranks/threads > # of components
ignore_lines.append("WARNING: No components are assigned to")
#These are warnings/info generated by SST/memH in debug mode
ignore_lines.append("Notice: memory controller's region is larger than the backend's mem_size")
ignore_lines.append("Region: start=")
# This may be present if ranks < 2
ignore_lines.append("not aligned to the request size")
# Statistics that count occupancy on each cycle sometimes diff in parallel execution
# due to the synchronization interval sometimes allowing the clock to run ahead a cycle or so
tol_stats = { "outstanding_requests" : [0, 0, 20, 0, 0], # Only diffs in number of cycles
"total_cycles" : [20, 'X', 20, 20, 20], # This stat is set once at the end of sim. May vary in all fields
"MSHR_occupancy" : [0, 0, 20, 0, 0] } # Only diffs in number of cycles
filesAreTheSame, statDiffs, othDiffs = testing_stat_output_diff(outfile, reffile, ignore_lines, tol_stats, True)
# Perform the tests
if ignore_err_file is False:
if os_test_file(errfile, "-s"):
log_testing_note("memHierarchy SDL test {0} has a Non-Empty Error File {1}".format(testDataFileName, errfile))
if filesAreTheSame:
log_debug(" -- Output file {0} passed check against the Reference File {1}".format(outfile, reffile))
else:
diffdata = self._prettyPrintDiffs(statDiffs, othDiffs)
log_failure(diffdata)
self.assertTrue(filesAreTheSame, "Output file {0} does not pass check against the Reference File {1} ".format(outfile, reffile))
###
# Remove lines containing any string found in 'remove_strs' from in_file
# If out_file != None, output is out_file
# Otherwise, in_file is overwritten
def _remove_lines_cleanup_file(self, remove_strs, in_file, out_file = None, append = False):
with open(in_file, 'r') as fp:
lines = fp.readlines()
if out_file == None:
out_file = in_file
if append == True:
mode = 'a'
else:
mode = 'w'
with open(out_file, mode) as fp:
if not append:
fp.truncate(0)
for line in lines:
skip = False
for search in remove_strs:
if search in line:
skip = True
continue
if not skip:
fp.write(line)
def _prettyPrintDiffs(self, stat_diff, oth_diff):
out = ""
if len(stat_diff) != 0:
out = "Statistic diffs:\n"
for x in stat_diff:
out += (x[0] + " " + ",".join(str(y) for y in x[1:]) + "\n")
if len(oth_diff) != 0:
out += "Non-statistic diffs:\n"
for x in oth_diff:
out += x[0] + " " + x[1] + "\n"
return out
| self.memHierarchy_Template("sdl9-2") |
04_test.py | import os
import sys
import importlib
import argparse
import csv
import numpy as np
import time
import pickle
import pathlib
import gzip
import tensorflow as tf
import tensorflow.contrib.eager as tfe
import svmrank
import utilities
from utilities_tf import load_batch_gcnn
def load_batch_flat(sample_files, feats_type, augment_feats, normalize_feats):
cand_features = []
cand_choices = []
cand_scoress = []
for i, filename in enumerate(sample_files):
cand_states, cand_scores, cand_choice = utilities.load_flat_samples(filename, feats_type, 'scores', augment_feats, normalize_feats)
cand_features.append(cand_states)
cand_choices.append(cand_choice)
cand_scoress.append(cand_scores)
n_cands_per_sample = [v.shape[0] for v in cand_features]
cand_features = np.concatenate(cand_features, axis=0).astype(np.float32, copy=False)
cand_choices = np.asarray(cand_choices).astype(np.int32, copy=False)
cand_scoress = np.concatenate(cand_scoress, axis=0).astype(np.float32, copy=False)
n_cands_per_sample = np.asarray(n_cands_per_sample).astype(np.int32, copy=False)
return cand_features, n_cands_per_sample, cand_choices, cand_scoress
def padding(output, n_vars_per_sample, fill=-1e8):
n_vars_max = tf.reduce_max(n_vars_per_sample)
output = tf.split(
value=output,
num_or_size_splits=n_vars_per_sample,
axis=1,
)
output = tf.concat([
tf.pad(
x,
paddings=[[0, 0], [0, n_vars_max - tf.shape(x)[1]]],
mode='CONSTANT',
constant_values=fill)
for x in output
], axis=0)
return output
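# Shape note (explanatory): padding() splits a 1 x sum(n_vars) row back into per-sample
# chunks and right-pads each to the longest sample, e.g. n_vars_per_sample = [2, 3]
# turns [[a, b, c, d, e]] into [[a, b, fill], [c, d, e]].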
def process(policy, dataloader, top_k):
mean_kacc = np.zeros(len(top_k))
n_samples_processed = 0
for batch in dataloader:
if policy['type'] == 'gcnn':
c, ei, ev, v, n_cs, n_vs, n_cands, cands, best_cands, cand_scores = batch
pred_scores = policy['model']((c, ei, ev, v, tf.reduce_sum(n_cs, keepdims=True), tf.reduce_sum(n_vs, keepdims=True)), tf.convert_to_tensor(False))
# filter candidate variables
pred_scores = tf.expand_dims(tf.gather(tf.squeeze(pred_scores, 0), cands), 0)
elif policy['type'] == 'ml-competitor':
cand_feats, n_cands, best_cands, cand_scores = batch
# move to numpy
cand_feats = cand_feats.numpy()
n_cands = n_cands.numpy()
# feature normalization
cand_feats = (cand_feats - policy['feat_shift']) / policy['feat_scale']
pred_scores = policy['model'].predict(cand_feats)
# move back to TF
pred_scores = tf.convert_to_tensor(pred_scores.reshape((1, -1)), dtype=tf.float32)
# padding
pred_scores = padding(pred_scores, n_cands)
true_scores = padding(tf.reshape(cand_scores, (1, -1)), n_cands)
true_bestscore = tf.reduce_max(true_scores, axis=-1, keepdims=True)
assert all(true_bestscore.numpy() == np.take_along_axis(true_scores.numpy(), best_cands.numpy().reshape((-1, 1)), axis=1))
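        # acc@k below: a sample counts as correct when the true best candidate's score
        # appears among the k highest predicted scores (ties with the best all count).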
kacc = []
for k in top_k:
pred_top_k = tf.nn.top_k(pred_scores, k=k)[1].numpy()
pred_top_k_true_scores = np.take_along_axis(true_scores.numpy(), pred_top_k, axis=1)
kacc.append(np.mean(np.any(pred_top_k_true_scores == true_bestscore.numpy(), axis=1)))
kacc = np.asarray(kacc)
batch_size = int(n_cands.shape[0])
mean_kacc += kacc * batch_size
n_samples_processed += batch_size
mean_kacc /= n_samples_processed
return mean_kacc
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'problem',
help='MILP instance type to process.',
choices=['setcover', 'cauctions', 'facilities', 'indset'],
)
parser.add_argument(
'-g', '--gpu',
help='CUDA GPU id (-1 for CPU).',
type=int,
default=0,
)
args = parser.parse_args()
print(f"problem: {args.problem}")
print(f"gpu: {args.gpu}")
os.makedirs("results", exist_ok=True)
result_file = f"results/{args.problem}_validation_{time.strftime('%Y%m%d-%H%M%S')}.csv"
seeds = [0, 1, 2, 3, 4]
gcnn_models = ['baseline']
other_models = ['extratrees_gcnn_agg', 'lambdamart_khalil', 'svmrank_khalil']
test_batch_size = 128
top_k = [1, 3, 5, 10]
problem_folders = {
'setcover': 'setcover/500r_1000c_0.05d',
'cauctions': 'cauctions/100_500',
'facilities': 'facilities/100_100_5',
'indset': 'indset/500_4',
}
problem_folder = problem_folders[args.problem]
if args.problem == 'setcover':
gcnn_models += ['mean_convolution', 'no_prenorm']
result_file = f"results/{args.problem}_test_{time.strftime('%Y%m%d-%H%M%S')}"
result_file = result_file + '.csv'
os.makedirs('results', exist_ok=True)
### TENSORFLOW SETUP ###
if args.gpu == -1:
os.environ['CUDA_VISIBLE_DEVICES'] = ''
else:
|
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
tf.enable_eager_execution(config)
tf.executing_eagerly()
test_files = list(pathlib.Path(f"data/samples/{problem_folder}/test").glob('sample_*.pkl'))
test_files = [str(x) for x in test_files]
print(f"{len(test_files)} test samples")
evaluated_policies = [['gcnn', model] for model in gcnn_models] + \
[['ml-competitor', model] for model in other_models]
fieldnames = [
'policy',
'seed',
] + [
f'acc@{k}' for k in top_k
]
with open(result_file, 'w', newline='') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader()
for policy_type, policy_name in evaluated_policies:
print(f"{policy_type}:{policy_name}...")
for seed in seeds:
rng = np.random.RandomState(seed)
tf.set_random_seed(rng.randint(np.iinfo(int).max))
policy = {}
policy['name'] = policy_name
policy['type'] = policy_type
if policy['type'] == 'gcnn':
# load model
sys.path.insert(0, os.path.abspath(f"models/{policy['name']}"))
import model
importlib.reload(model)
del sys.path[0]
policy['model'] = model.GCNPolicy()
policy['model'].restore_state(f"trained_models/{args.problem}/{policy['name']}/{seed}/best_params.pkl")
policy['model'].call = tfe.defun(policy['model'].call, input_signature=policy['model'].input_signature)
policy['batch_datatypes'] = [tf.float32, tf.int32, tf.float32,
tf.float32, tf.int32, tf.int32, tf.int32, tf.int32, tf.int32, tf.float32]
policy['batch_fun'] = load_batch_gcnn
else:
# load feature normalization parameters
try:
with open(f"trained_models/{args.problem}/{policy['name']}/{seed}/normalization.pkl", 'rb') as f:
policy['feat_shift'], policy['feat_scale'] = pickle.load(f)
except:
policy['feat_shift'], policy['feat_scale'] = 0, 1
# load model
if policy_name.startswith('svmrank'):
policy['model'] = svmrank.Model().read(f"trained_models/{args.problem}/{policy['name']}/{seed}/model.txt")
else:
with open(f"trained_models/{args.problem}/{policy['name']}/{seed}/model.pkl", 'rb') as f:
policy['model'] = pickle.load(f)
# load feature specifications
with open(f"trained_models/{args.problem}/{policy['name']}/{seed}/feat_specs.pkl", 'rb') as f:
feat_specs = pickle.load(f)
policy['batch_datatypes'] = [tf.float32, tf.int32, tf.int32, tf.float32]
policy['batch_fun'] = lambda x: load_batch_flat(x, feat_specs['type'], feat_specs['augment'], feat_specs['qbnorm'])
test_data = tf.data.Dataset.from_tensor_slices(test_files)
test_data = test_data.batch(test_batch_size)
test_data = test_data.map(lambda x: tf.py_func(
policy['batch_fun'], [x], policy['batch_datatypes']))
test_data = test_data.prefetch(2)
test_kacc = process(policy, test_data, top_k)
print(f" {seed} " + " ".join([f"acc@{k}: {100*acc:4.1f}" for k, acc in zip(top_k, test_kacc)]))
writer.writerow({
**{
'policy': f"{policy['type']}:{policy['name']}",
'seed': seed,
},
**{
f'acc@{k}': test_kacc[i] for i, k in enumerate(top_k)
},
})
csvfile.flush()
| os.environ['CUDA_VISIBLE_DEVICES'] = f'{args.gpu}' |
globalconfiguration_test.go | package validation
import (
"testing"
"github.com/nginxinc/kubernetes-ingress/pkg/apis/configuration/v1alpha1"
"k8s.io/apimachinery/pkg/util/validation/field"
)
func createGlobalConfigurationValidator() *GlobalConfigurationValidator {
return &GlobalConfigurationValidator{}
}
func TestValidateGlobalConfiguration(t *testing.T) {
globalConfiguration := v1alpha1.GlobalConfiguration{
Spec: v1alpha1.GlobalConfigurationSpec{
Listeners: []v1alpha1.Listener{
{
Name: "tcp-listener",
Port: 53, | Port: 53,
Protocol: "UDP",
},
},
},
}
gcv := createGlobalConfigurationValidator()
err := gcv.ValidateGlobalConfiguration(&globalConfiguration)
if err != nil {
t.Errorf("ValidateGlobalConfiguration() returned error %v for valid input", err)
}
}
func TestValidateListenerPort(t *testing.T) {
forbiddenListenerPorts := map[int]bool{
1234: true,
}
gcv := &GlobalConfigurationValidator{
forbiddenListenerPorts: forbiddenListenerPorts,
}
allErrs := gcv.validateListenerPort(5555, field.NewPath("port"))
if len(allErrs) > 0 {
t.Errorf("validateListenerPort() returned errors %v for valid input", allErrs)
}
allErrs = gcv.validateListenerPort(1234, field.NewPath("port"))
if len(allErrs) == 0 {
t.Errorf("validateListenerPort() returned no errors for invalid input")
}
}
func TestValidateListeners(t *testing.T) {
listeners := []v1alpha1.Listener{
{
Name: "tcp-listener",
Port: 53,
Protocol: "TCP",
},
{
Name: "udp-listener",
Port: 53,
Protocol: "UDP",
},
}
gcv := createGlobalConfigurationValidator()
allErrs := gcv.validateListeners(listeners, field.NewPath("listeners"))
if len(allErrs) > 0 {
t.Errorf("validateListeners() returned errors %v for valid intput", allErrs)
}
}
func TestValidateListenersFails(t *testing.T) {
tests := []struct {
listeners []v1alpha1.Listener
msg string
}{
{
listeners: []v1alpha1.Listener{
{
Name: "tcp-listener",
Port: 2201,
Protocol: "TCP",
},
{
Name: "tcp-listener",
Port: 2202,
Protocol: "TCP",
},
},
msg: "duplicated name",
},
{
listeners: []v1alpha1.Listener{
{
Name: "tcp-listener-1",
Port: 2201,
Protocol: "TCP",
},
{
Name: "tcp-listener-2",
Port: 2201,
Protocol: "TCP",
},
},
msg: "duplicated port/protocol combination",
},
}
gcv := createGlobalConfigurationValidator()
for _, test := range tests {
allErrs := gcv.validateListeners(test.listeners, field.NewPath("listeners"))
if len(allErrs) == 0 {
t.Errorf("validateListeners() returned no errors for invalid input for the case of %s", test.msg)
}
}
}
func TestValidateListener(t *testing.T) {
listener := v1alpha1.Listener{
Name: "tcp-listener",
Port: 53,
Protocol: "TCP",
}
gcv := createGlobalConfigurationValidator()
allErrs := gcv.validateListener(listener, field.NewPath("listener"))
if len(allErrs) > 0 {
t.Errorf("validateListener() returned errors %v for valid intput", allErrs)
}
}
func TestValidateListenerFails(t *testing.T) {
tests := []struct {
Listener v1alpha1.Listener
msg string
}{
{
Listener: v1alpha1.Listener{
Name: "@",
Port: 2201,
Protocol: "TCP",
},
msg: "invalid name",
},
{
Listener: v1alpha1.Listener{
Name: "tcp-listener",
Port: -1,
Protocol: "TCP",
},
msg: "invalid port",
},
{
Listener: v1alpha1.Listener{
Name: "name",
Port: 2201,
Protocol: "IP",
},
msg: "invalid protocol",
},
{
Listener: v1alpha1.Listener{
Name: "tls-passthrough",
Port: 2201,
Protocol: "TCP",
},
msg: "name of a built-in listener",
},
}
gcv := createGlobalConfigurationValidator()
for _, test := range tests {
allErrs := gcv.validateListener(test.Listener, field.NewPath("listener"))
if len(allErrs) == 0 {
t.Errorf("validateListener() returned no errors for invalid input for the case of %s", test.msg)
}
}
}
func TestGeneratePortProtocolKey(t *testing.T) {
port := 53
protocol := "UDP"
expected := "53/UDP"
result := generatePortProtocolKey(port, protocol)
if result != expected {
t.Errorf("generatePortProtocolKey(%d, %q) returned %q but expected %q", port, protocol, result, expected)
}
} | Protocol: "TCP",
},
{
Name: "udp-listener", |
action.go | // Copyright 2020 IBM Corp.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package manifests
import (
"context"
"fmt"
emperrors "emperror.dev/errors"
"github.com/redhat-marketplace/redhat-marketplace-operator/pkg/utils/patch"
. "github.com/redhat-marketplace/redhat-marketplace-operator/pkg/utils/reconcileutils"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/types"
"k8s.io/apimachinery/pkg/util/jsonmergepatch"
"sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/controller-runtime/pkg/reconcile"
)
type createOrUpdateFactoryItemAction struct {
*BaseAction
object runtime.Object
factoryFunc func() (runtime.Object, error)
owner runtime.Object
patcher patch.Patcher
}
type CreateOrUpdateFactoryItemArgs struct {
Owner runtime.Object
Patcher patch.Patcher
}
func CreateOrUpdateFactoryItemAction(
newObj runtime.Object,
factoryFunc func() (runtime.Object, error),
args CreateOrUpdateFactoryItemArgs,
) *createOrUpdateFactoryItemAction |
func (a *createOrUpdateFactoryItemAction) Bind(result *ExecResult) {
a.SetLastResult(result)
}
func (a *createOrUpdateFactoryItemAction) Exec(ctx context.Context, c *ClientCommand) (*ExecResult, error) {
reqLogger := a.GetReqLogger(c)
result, err := a.factoryFunc()
if err != nil {
reqLogger.Error(err, "failure creating factory obj")
return NewExecResult(Error, reconcile.Result{Requeue: true}, err), emperrors.Wrap(err, "error with patch")
}
key, err := client.ObjectKeyFromObject(result)
if err != nil {
reqLogger.Error(err, "failure getting factory obj name")
return NewExecResult(Error, reconcile.Result{Requeue: true}, err), emperrors.Wrap(err, "error with patch")
}
cmd := HandleResult(
GetAction(key, a.object),
OnNotFound(CreateAction(result,
CreateWithAddOwner(a.owner),
CreateWithPatch(a.patcher))),
OnContinue(Call(func() (ClientAction, error) {
// handle case if original config is missing
if orig, _ := a.patcher.GetOriginalConfiguration(a.object); orig == nil {
data, _ := a.patcher.GetModifiedConfiguration(a.object, false)
a.patcher.SetOriginalConfiguration(a.object, data)
}
patch, err := a.patcher.Calculate(a.object, result)
if err != nil {
return nil, emperrors.Wrap(err, "error creating patch")
}
if patch.IsEmpty() {
return nil, nil
}
err = a.patcher.SetLastAppliedAnnotation(result)
if err != nil {
return nil, emperrors.Wrap(err, "error creating patch")
}
patch, err = a.patcher.Calculate(a.object, result)
if err != nil {
return nil, emperrors.Wrap(err, "error creating patch")
}
if patch.IsEmpty() {
return nil, nil
}
reqLogger.Info("updating with patch",
"patch", string(patch.Patch),
)
jsonPatch, err := jsonmergepatch.CreateThreeWayJSONMergePatch(patch.Original, patch.Modified, patch.Current)
if err != nil {
return nil, emperrors.Wrap(err, "Failed to generate merge patch")
}
return UpdateWithPatchAction(a.object, types.MergePatchType, jsonPatch), nil
})))
cmd.Bind(a.GetLastResult())
return c.Do(ctx, cmd)
}
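// Note on the patching flow above: patcher.Calculate performs a three-way comparison of
// the last-applied (original), desired (modified) and live (current) states, and
// jsonmergepatch then emits only the changed fields, so reconciles whose calculated
// patch is empty short-circuit without issuing an update.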
type createIfNotExistsAction struct {
*BaseAction
factoryFunc func() (runtime.Object, error)
newObject runtime.Object
createActionOptions []CreateActionOption
}
func CreateIfNotExistsFactoryItem(
newObj runtime.Object,
factoryFunc func() (runtime.Object, error),
opts ...CreateActionOption,
) *createIfNotExistsAction {
return &createIfNotExistsAction{
newObject: newObj,
createActionOptions: opts,
factoryFunc: factoryFunc,
BaseAction: NewBaseAction("createIfNotExistsAction"),
}
}
func (a *createIfNotExistsAction) Bind(result *ExecResult) {
a.SetLastResult(result)
}
func (a *createIfNotExistsAction) Exec(ctx context.Context, c *ClientCommand) (*ExecResult, error) {
reqLogger := a.GetReqLogger(c)
result, err := a.factoryFunc()
if err != nil {
reqLogger.Error(err, "failure creating factory obj")
return NewExecResult(Error, reconcile.Result{Requeue: true}, err), emperrors.Wrap(err, "error with create")
}
key, _ := client.ObjectKeyFromObject(result)
reqLogger = reqLogger.WithValues("requestType", fmt.Sprintf("%T", a.newObject), "key", key)
reqLogger.V(0).Info("Creating object if not found", "object", result)
return c.Do(
ctx,
HandleResult(
GetAction(key, a.newObject),
OnNotFound(
HandleResult(
CreateAction(result, a.createActionOptions...),
OnRequeue(ContinueResponse()),
),
),
),
)
}
| {
return &createOrUpdateFactoryItemAction{
BaseAction: NewBaseAction("createOrUpdateFactoryItem"),
object: newObj,
factoryFunc: factoryFunc,
owner: args.Owner,
patcher: args.Patcher,
}
} |
org_quality_report.py | from django.db.models import Q
from apps.configattribute.models import ConfigAttribute
from apps.property.models import GenericProperty
from apps.utils.data_helpers.manager import DataManager
from apps.utils.iotile.variable import SYSTEM_VID
from apps.utils.timezone_utils import display_formatted_ts
class TripInfo(object):
block = None
data = {}
slug = None
last_update = None
def __init__(self, block):
self.block = block
self.slug = block.slug
self.data = {
'summary': {},
'properties': {}
}
self.last_update = None
def add_property(self, key, value):
self.data['properties'][key] = value
def add_summary_event(self, event):
if 'summary' in self.data:
if self.last_update and self.last_update > event.timestamp:
return
self.data['summary'] = event.extra_data
# Trip Summary should win over Trip Update
self.last_update = event.timestamp
def to_representation(self):
data = {
'slug': self.slug,
'label': self.block.title,
'summary_date': display_formatted_ts(self.last_update) if self.last_update else '',
'data': self.data
}
return data
class TripOrgQualityReport(object):
org = None
results = {}
config = {}
def __init__(self, org):
self.org = org
self.results = {}
self.config = self._get_config_attributes()
def _get_config_attributes(self):
config_name = ':report:trip_quality:config'
attribute = ConfigAttribute.objects.get_attribute_by_priority(name=config_name, target_slug=self.org.obj_target_slug)
if attribute:
return attribute.data
# Return empty if it does not exist
return {
'summary_keys': [
"Device",
"START (UTC)",
"END (UTC)",
"Duration (Days)",
"Event Count",
"First event at (UTC)",
"Last event at (UTC)",
"Max Humidity (% RH)",
"Min Humidity (% RH)",
"Median Humidity (% RH)",
"Max Pressure (Mbar)",
"Min Pressure (Mbar)",
"Median Pressure (Mbar)",
"Max Temp (C)",
"Min Temp (C)",
"Median Temp (C)",
"Above 30C",
"Below 17C",
"Max Peak (G)",
"TimeStamp(MaxPeak) (UTC)",
"DeltaV at Max Peak (in/s)",
"MaxDeltaV (in/s)",
"TimeStamp(MaxDeltaV) (UTC)",
"Peak at MaxDeltaV (G)"
],
'property_keys': []
}
def | (self):
"""
Get all archives for an organization and fill a TripInfo object for each with the following
- Selected trip properties (based on project's configAttribute)
- Last Update Event, if any
- Last Trip Summary Event, if any
:return: Nothing
"""
blocks = self.org.data_blocks.all()
for block in blocks:
self.results[block.slug] = TripInfo(block)
block_slugs = [block.slug for block in blocks]
if self.config and 'property_keys' in self.config:
for property_item in self.config['property_keys']:
properties = GenericProperty.objects.filter(target__in=block_slugs, name=property_item)
for p in properties:
self.results[p.target].add_property(property_item, p.value)
        # Not great, but we seem to have blocks with an empty project slug as well as blocks with the placeholder 'p--0000-0000'
q = Q(project_slug='') | Q(project_slug='p--0000-0000')
q = q & Q(device_slug__in=block_slugs, variable_slug__icontains=SYSTEM_VID['TRIP_SUMMARY'])
events = DataManager.filter_qs_using_q(
'event',
q=q
)
for event in events:
self.results[event.device_slug].add_summary_event(event)
# Cleanup reports that don't look complete (No Summary or Properties)
to_delete = []
for slug, trip in self.results.items():
if trip.data['summary'] == {}:
# Delete Archive that does not represent a real trip
to_delete.append(slug)
for slug in to_delete:
del(self.results[slug])
| analyze |
forms.py | # -*- coding: utf-8 -*-
"""Public forms."""
from flask_wtf import Form
from wtforms import PasswordField, StringField
from wtforms.validators import DataRequired
from kelbyapp.user.models import User
class LoginForm(Form):
"""Login form."""
username = StringField('Username', validators=[DataRequired()])
password = PasswordField('Password', validators=[DataRequired()])
def __init__(self, *args, **kwargs):
"""Create instance."""
super(LoginForm, self).__init__(*args, **kwargs)
self.user = None
def validate(self):
"""Validate the form."""
initial_validation = super(LoginForm, self).validate()
if not initial_validation:
return False
self.user = User.query.filter_by(username=self.username.data).first()
if not self.user:
|
if not self.user.check_password(self.password.data):
self.password.errors.append('Invalid password')
return False
if not self.user.active:
self.username.errors.append('User not activated')
return False
return True
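# Usage sketch (hypothetical Flask view; the `request` import, `login_user`
# and `flash_errors` helpers are assumptions):
#
#   form = LoginForm(request.form)
#   if form.validate():
#       login_user(form.user)
#   else:
#       flash_errors(form)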
commands.py | import asyncio, datetime, discord, json, pycountry, random, re, requests, time, traceback
from aioconsole import ainput
from word2number import w2n
from client import *
from datamanager import config, del_data, get_data, has_data, mod_data, set_data, batch_set_data
from discordutils import *
from league import *
async def dm(user, *a, **k):
channel = user.dm_channel
if channel is None:
channel = await user.create_dm()
await channel.send(*a, **k)
@client.command("", ["help"], "", "")
@client.command("General Commands", ["help", "rpg"], "help [rpg]", "post a list of commands")
async def command_help(command, message):
sections = {}
for section, _, syntax, description, _ in client.commands:
if section == "" or ((section == "RPG Commands") ^ (len(command) == 3)): continue
if section not in sections:
sections[section] = []
sections[section].append(f"`{syntax}` - {description}")
embed = discord.Embed(
title = "Help - Commands",
color = client.color
)
for section in sections:
embed.add_field(name = section, value = "\n".join(sections[section]), inline = False)
await dm(message.author, embed = embed)
await send(message, "Sent the command list to your DMs!")
@client.command("General Commands", ["ping"], "ping", "check your ping")
async def command_ping(command, message):
ping = int((time.time() - (message.created_at - datetime.datetime(1970, 1, 1)) / datetime.timedelta(seconds = 1)) * 1000)
await send(message, f"Pong! ({ping} ms)", reaction = "🏓")
@client.command("Channel Type Commands", ["subscribe"], "subscribe", "announce updates to this channel")
async def command_subscribe(command, message):
await mod_data("announcement_channels", lambda x: x | {message.channel.id}, default = set())
await send(message, "Subscribed to status updates here!")
@client.command("Channel Type Commands", ["unsubscribe"], "unsubscribe", "stop announcing updates to this channel")
async def command_unsubscribe(command, message):
await mod_data("announcement_channels", lambda x: x - {message.channel.id}, default = set())
await send(message, "Unsubscribed from status updates here!")
@client.command("Channel Type Commands", ["watch", ("osu", "genshin")], "watch osu/genshin", "watch osu!/Genshin Impact updates here")
async def command_watch(command, message):
await mod_data("watch_channels", command[2], lambda x: x | {message.channel.id}, default = set())
await send(message, "Now watching " + {"osu": "osu!", "genshin": "Genshin Impact"}[command[2]] + " updates in this channel!")
@client.command("Channel Type Commands", ["unwatch", ("osu", "genshin")], "unwatch osu/genshin", "stop watching osu!/Genshin Impact updates here")
async def command_unwatch(command, message):
await mod_data("watch_channels", command[2], lambda x: x - {message.channel.id}, default = set())
await send(message, "No longer watching " + {"osu": "osu!", "genshin": "Genshin Impact"}[command[2]] + " updates in this channel!")
words = None
wordmap = {}
with open("data/words.txt") as f:
words = [x for x in f.read().strip().splitlines() if len(x) >= 5]
for word in words:
key = "".join(sorted(word))
if key not in wordmap:
wordmap[key] = set()
wordmap[key].add(word)
anagram_lock = asyncio.Lock()
def display(actual, scrambled, hint):
if hint == 0: return scrambled
cl = list(scrambled)
start = actual[:hint if hint * 2 <= len(actual) else -hint]
end = actual[-hint:]
for c in start + end:
cl.remove(c)
return f"**{start}**{''.join(cl)}**{end}**"
async def anagram_function(message, answer = None, start = False, stop = False, hint = False, reorder = False):
global words, wordmap
async with anagram_lock:
active = await has_data("anagram", message.channel.id, "puzzle")
puzzle = await get_data("anagram", message.channel.id, "puzzle", default = "", set_if_missing = False)
answers = wordmap.get("".join(sorted(puzzle)), set())
current_hint = await get_data("anagram", message.channel.id, "hint", default = 0, set_if_missing = False)
if reorder:
if active:
charlist = list(puzzle)
random.shuffle(charlist)
puzzle = "".join(charlist)
await set_data("anagram", message.channel.id, "puzzle", puzzle)
await send(message, f"Reordered: solve for '{display(sorted(answers)[0], puzzle, current_hint)}' ({len(puzzle)}).")
else:
await send(message, "There is no ongoing anagram puzzle in this channel!", reaction = "x")
if hint:
if active:
if len(puzzle) - current_hint * 2 - 2 <= 1:
stop = True
else:
await set_data("anagram", message.channel.id, "hint", current_hint + 1)
await send(message, f"Hint: 2 more letters shown: solve for '{display(sorted(answers)[0], puzzle, current_hint + 1)}' ({len(puzzle)}).")
else:
await send(message, "There is no ongoing anagram puzzle in this channel!", reaction = "x")
if stop:
if active:
if len(answers) == 1:
await send(message, f"Anagram puzzle ended! The correct answer was '{list(answers)[0]}'.")
else:
await send(message, f"Anagram puzzle ended! The correct answers were {english_list(quote(answers))}.")
await del_data("anagram", message.channel.id)
active = False
else:
await send(message, "There is no ongoing anagram puzzle in this channel!", reaction = "x")
if active and answer in answers:
try:
points = len(answer) - 2 * await get_data("anagram", message.channel.id, "hint")
bonus = int(points / 2) * (time.time() - await get_data("anagram", message.channel.id, "timestamp", default = 0) <= 5)
await mod_data("leaderboard", "anagram", message.author.id, lambda x: x + points + bonus, default = 0)
await batch_set_data("anagram", message.channel.id, active = False, last = answers, lasttime = time.time())
active = False
bonus_display = f" **+{bonus}**" if bonus else ""
alt_display = f" (Alternative answers: {english_list(quote(answers - {answer}))})" if len(answers) > 1 else ""
await send(message, f"Congratulations to {message.author.mention} for winning the anagram puzzle! (+{points}{bonus_display}){alt_display}", allowed_mentions = discord.AllowedMentions.none())
start = True
except:
print(traceback.format_exc())
elif answer in await get_data("anagram", message.channel.id, "last", default = set()) and time.time() - await get_data("anagram", message.channel.id, "lasttime", default = 0) <= 1:
await send(message, f"{message.author.mention} L", reaction = "x", allowed_mentions = discord.AllowedMentions.none())
if start:
if active:
hint = await get_data("anagram", message.channel.id, "hint", default = 0)
actual = sorted(answers)[0]
await send(message, f"An anagram puzzle is already running! Solve for '{display(actual, puzzle, hint)}' ({len(puzzle)}).", reaction = "x")
else:
word = random.choice(words)
charlist = list(word)
random.shuffle(charlist)
scrambled = "".join(charlist)
await batch_set_data("anagram", message.channel.id, active = True, puzzle = scrambled, hint = 0, timestamp = time.time())
await send(message, f"Anagram puzzle! Solve for '{scrambled}' ({len(word)}).")
@client.command("Anagram Commands", ["anagram"], "anagram start", "start an anagram puzzle")
async def command_anagram_start(command, message):
await anagram_function(message, start = True)
@client.command("Anagram Commands", ["anagram", "restart"], "anagram restart", "restart the anagram puzzle")
async def command_anagram_restart(command, message):
await anagram_function(message, stop = True, start = True)
@client.command("Anagram Commands", ["anagram", "stop"], "anagram stop", "stop the anagram puzzle")
async def command_anagram_stop(command, message):
await anagram_function(message, stop = True)
@client.command("Anagram Commands", ["anagram", "shuffle"], "anagram shuffle", "alias for `anagram reorder`")
@client.command("Anagram Commands", ["anagram", "scramble"], "anagram scramble", "alias for `anagram reorder`")
@client.command("Anagram Commands", ["anagram", "reorder"], "anagram reorder", "reorder the anagram puzzle")
async def command_anagram_reorder(command, message):
await anagram_function(message, reorder = True)
@client.command("Anagram Commands", ["anagram", "hint"], "anagram hint", "show another character in the anagram puzzle")
async def command_anagram_hint(command, message):
await anagram_function(message, hint = True)
@client.command("Anagram Commands", ["anagram", "add", "?"], "anagram add <word>", "add a word to the anagram dictionary")
async def command_anagram_add(command, message):
global words, wordmap
word = command[3].strip().lower()
if all(char in "abcdefghijklmnopqrstuvwxyz" for char in word):
if word in words:
await send(message, "This word is already in the dictionary!", reaction = "x")
else:
words.append(word)
words.sort()
with open("data/words.txt", "w") as f:
f.write("\n".join(words))
key = "".join(sorted(word))
if key not in wordmap:
wordmap[key] = set()
wordmap[key].add(word)
await send(message, f"Added '{word}' to the dictionary!")
else:
await send(message, "Words must only contain letters!", reaction = "x")
@client.command("Anagram Commands", ["anagram", "rm", "?"], "anagram rm <word>", "alias for `anagram remove`")
@client.command("Anagram Commands", ["anagram", "remove", "?"], "anagram remove <word>", "remove a word from the anagram dictionary")
async def command_anagram_remove(command, message):
global words, wordmap
word = command[3].strip().lower()
if word in words:
words.remove(word)
with open("data/words.txt", "w") as f:
f.write("\n".join(words))
key = "".join(sorted(word))
wordmap[key].discard(word)
await send(message, f"Removed '{word}' from the dictionary!")
else:
await send(message, "This word is not in the dictionary!", reaction = "x")
@client.command("Anagram Commands", ["anagram", "lb"], "anagram lb", "alias for `anagram leaderboard`")
@client.command("Anagram Commands", ["anagram", "leaderboard"], "anagram leaderboard", "show the leaderboard for the anagram puzzle")
async def command_anagram_leaderboard(command, message):
scores = []
scoremap = await get_data("leaderboard", "anagram")
for member in message.guild.members:
score = scoremap.get(member.id, 0)
if score:
scores.append((score, member))
scores.sort(reverse = True)
await send(message, embed = discord.Embed(
title = "Leaderboard - Anagram",
description = "\n".join(f"{member.mention} - {score}" for score, member in scores)
))
@client.command("", lambda m: True, "", "")
async def command_anagram_answer(command, message):
try:
await anagram_function(message, answer = message.content.strip().strip("!@#$%^&*()[]{}/|\\.,<>\"'").lower())
except:
pass
@client.command("User Commands", ["alias", "?", "?"], "alias <name> <user>", "alias a name to a user")
async def command_alias(command, message):
member = await get_member(message.guild, command[3], message.author)
await set_data("aliases", message.guild.id, command[2].lower(), member.id)
await send(message, f"Aliased '{command[2].lower()}' to {member.mention}!", allowed_mentions = discord.AllowedMentions.none())
@client.command("User Commands", ["unalias", "?"], "unalias <name>", "remove a name's alias")
async def command_unalias(command, message):
await set_data("aliases", message.guild.id, command[2].lower(), None)
await send(message, f"Removed the alias for '{command[2].lower()}'!")
@client.command("User Commands", ["unbonk", "?", "..."], "unbonk <user>", "alias for `unignore`")
@client.command("User Commands", ["unignore", "?", "..."], "unignore <user>", "make the bot no longer ignore messages from a particular user (on a server)")
@client.command("User Commands", ["bonk", "?", "..."], "bonk <user>", "alias for `ignore`")
@client.command("User Commands", ["ignore", "?", "..."], "ignore <user>", "make the bot ignore all messages from a particular user (on a server)")
async def command_ignore(command, message):
for uinfo in command[2:]:
member = await get_member(message.guild, uinfo, message.author)
if not command[1].startswith("un") and member == message.author:
await send(message, f"You cannot {command[1]} yourself!", reaction = "x")
else:
await set_data("ignore", message.guild.id, member.id, not command[1].startswith("un"))
await send(message, f"No longer ignoring {member.mention}!" if command[1].startswith("un") else f"{'Bonk! ' * (command[1] == 'bonk')}Now ignoring {member.mention}!", allowed_mentions = discord.AllowedMentions.none())
@client.command("User Commands", ["unshut", "?", "..."], "unbonk <user>", "alias for `unsilence`")
@client.command("User Commands", ["unsilence", "?", "..."], "unignore <user>", "make the bot delete messages from a particular user (on a server)")
@client.command("User Commands", ["shut", "?", "..."], "bonk <user>", "alias for `silence`")
@client.command("User Commands", ["silence", "?", "..."], "ignore <user>", "make the bot delete messages from a particular user (on a server)")
async def command_silence(command, message):
for uinfo in command[2:]:
member = await get_member(message.guild, uinfo, message.author)
if not command[1].startswith("un") and member == message.author:
await send(message, f"You cannot {command[1]} yourself!", reaction = "x")
else:
await set_data("silence", message.guild.id, member.id, not command[1].startswith("un"))
await send(message, f"No longer silencing {member.mention}!" if command[1].startswith("un") else f"{'https://i.redd.it/l5jmlb1ltqj51.jpg' * (command[1] == 'shut')}Now silencing {member.mention}!", allowed_mentions = discord.AllowedMentions.none())
# @client.command("Role Commands", ["gib", "?", "..."], "gib <name> [roles...]", "alias for `role give`")
# @client.command("Role Commands", ["role", "give", "?", "..."], "role give <name> [roles...]", "give a list of roles to a user")
# async def command_role_give(command, message):
# user, *names = command[2 if command[1] == "gib" else 3:]
# member = await get_member(message.guild, user, message.author)
# roles = [get_role(message.guild, string) for string in names]
# if any(role.id == 741731868692709416 for role in roles) and member.id != 251082987360223233:
# await send(message, f"<@&741731868692709416> is exclusive to <@!251082987360223233>!", allowed_mentions = discord.AllowedMentions.none())
# else:
# await member.add_roles(*roles)
# await send(message, f"Granted {english_list(quote(role.mention for role in roles))} to {member.mention}!", allowed_mentions = discord.AllowedMentions(roles = False))
# @client.command("Role Commands", ["gibnt", "?", "..."], "gibnt <name> [roles...]", "alias for `role remove`")
# @client.command("Role Commands", ["role", "remove", "?", "..."], "role remove <name> [roles...]", "remove a list of roles from a user")
# async def command_role_remove(command, message):
# user, *names = command[2 if command[1] == "gibnt" else 3:]
# member = await get_member(message.guild, user, message.author)
# roles = [get_role(message.guild, string) for string in names]
# await member.remove_roles(*roles)
# await send(message, f"Removed {english_list(quote(role.mention for role in roles))} from {member.mention}!", allowed_mentions = discord.AllowedMentions(roles = False))
@client.command("", ["role", "colour", "?"], "", "")
@client.command("", ["role", "color", "?"], "", "")
@client.command("Role Commands", ["role", "colour", "?", "?"], "role colour <role> [colour = 0]", "alias for `role color`")
@client.command("Role Commands", ["role", "color", "?", "?"], "role color <role> [color = 0]", "recolor a role, or remove its color")
async def command_role_color(command, message):
role = get_role(message.guild, command[3])
await role.edit(color = get_color(command[4] if len(command) > 4 else "0"))
await send(message, f"Recolored '{role.mention}'!", allowed_mentions = discord.AllowedMentions.none())
@client.command("Role Commands", ["role", "rename", "?", "?"], "role rename <role> <name>", "rename a role")
async def command_role_rename(command, message):
role = get_role(message.guild, command[3])
name = role.name
await role.edit(name = command[4])
await send(message, f"Renamed '{name}' to '{command[4]}'!")
services = {
"lol": "lol",
"league": "lol",
"dmoj": "dmoj",
"cf": "cf",
"codeforces": "cf",
"osu": "osu",
"ow": "ow",
"overwatch": "ow"
}
service_list = tuple(services)
@client.command("", [service_list, "link", "?"], "", "")
@client.command("External User Commands", [service_list, "link", "?", "?"], "<lol/league | cf/codeforces | dmoj | osu | ow/overwatch> link [user = me] <account>", "link a user to an external account")
async def command_link(command, message):
service = services[command[1]]
member = await get_member(message.guild, command[3] if len(command) == 5 else "me", message.author)
await set_data("external", service, member.id, command[-1])
await send(message, f"Linked {member.mention} to {command[-1]}!", allowed_mentions = discord.AllowedMentions.none())
@client.command("", [service_list, "unlink"], "", "")
@client.command("External User Commands", [service_list, "unlink", "?"], "<lol/league | cf/codeforces | dmoj | osu | ow/overwatch> unlink [user = me]", "unlink a user from a service")
async def command_unlink(command, message):
service = services[command[1]]
member = await get_member(message.guild, command[3] if len(command) == 4 else "me", message.author)
await del_data("external", service, member.id)
await send(message, f"Unlinked {member.mention}!", allowed_mentions = discord.AllowedMentions.none())
async def get_ext_user(key, error, command, message):
if len(command) == 3:
if await has_data("external", key, message.author.id):
return await get_data("external", key, message.author.id)
else:
raise BotError(f"You are not linked; please specify {error} or link yourself first!")
else:
try:
member = await get_member(message.guild, command[3], message.author)
if await has_data("external", key, member.id):
return await get_data("external", key, member.id)
except:
pass
return command[3]
@client.command("", [("cf", "codeforces"), ("details", "rank", "rating")], "", "")
@client.command("External User Commands", [("cf", "codeforces"), ("details", "rank", "rating"), "?"], "cf/codeforces <details | rank/rating> [user = me]", "report a codeforces user's public details or just rank+rating")
async def command_cf_details(command, message):
cf = await get_ext_user("cf", "a codeforces user", command, message)
rv = requests.get("https://codeforces.com/api/user.info?handles=" + cf).json()
if rv["status"] == "OK":
cfdata = rv["result"][0]
if command[2] == "rank" or command[2] == "rating":
await send(message, f"{cf} is rank {cfdata['rank']} [{cfdata['rating']}] (max {cfdata['maxRank']} [{cfdata['maxRating']}])!")
else:
embed = discord.Embed(title = cf, color = client.color, url = "https://codeforces.com/profile/" + cf).set_thumbnail(url = "http:" + cfdata["avatar"])
for key, name in [
("email", "Email Address"),
("firstName", "First Name"),
("lastName", "Last Name"),
("organization", "Organization"),
("contribution", "Contribution"),
("friendOfCount", "Friend Of #")
]:
if cfdata.get(key):
embed.add_field(name = name, value = str(cfdata[key]))
if cfdata.get("country") or cfdata.get("city"):
city = f"{cfdata['city']}, " if cfdata.get("city") else ""
embed.add_field(name = "Location", value = f"{city}{cfdata['country']}")
embed.add_field(name = "Current Rank", value = f"{cfdata['rank']} [{cfdata['rating']}]")
embed.add_field(name = "Maximum Rank", value = f"{cfdata['maxRank']} [{cfdata['maxRating']}]")
embed.add_field(name = "Registered Since", value = datetime.datetime.fromtimestamp(cfdata["registrationTimeSeconds"]).strftime("%B %d, %Y at %H:%M:%S"))
embed.add_field(name = "Last Seen Online", value = datetime.datetime.fromtimestamp(cfdata["lastOnlineTimeSeconds"]).strftime("%B %d, %Y at %H:%M:%S"))
await send(message, embed = embed)
else:
await send(message, f"'{cf}' is not a codeforces user!", reaction = "x")
def dmoj_api(URL):
rv = requests.get(URL)
if rv.status_code != 200:
raise BotError(f"'{URL}' returned status {rv.status_code} (not 200)!")
data = rv.json()
if "error" in data:
raise BotError("Error fetching from DMOJ API; likely item does not exist!")
if "data" not in data:
raise BotError("Data not found; check the URL!")
return data["data"]
@client.command("", ["dmoj", ("details", "rank", "rating")], "", "")
@client.command("External User Commands", ["dmoj", ("details", "rank", "rating"), "?"], "dmoj <details | rank/rating> [user = me]", "report a DMOJ user's public details or just rank+rating")
async def command_dmoj_details(command, message):
dm = await get_ext_user("dmoj", "a DMOJ user", command, message)
dmdata = dmoj_api("https://dmoj.ca/api/v2/user/" + dm)["object"]
rating = dmdata["rating"]
if rating < 1000:
rank = "Newbie"
elif rating < 1200:
rank = "Amateur"
elif rating < 1500:
rank = "Expert"
elif rating < 1800:
rank = "Candidate Master"
elif rating < 2200:
rank = "Master"
elif rating < 3000:
rank = "Grandmaster"
else:
rank = "Target"
if dmdata["rank"] == "admin":
rank += " (Admin)"
if command[2] == "rank" or command[2] == "rating":
await send(message, f"{dmdata['username']} is rank {rank} [{rating}]!")
elif command[2] == "details":
await send(message, embed = discord.Embed(
title = dmdata["username"],
color = 0x3333AA,
url = "https://dmoj.ca/user/" + dmdata["username"]
).add_field(
name = "Points",
value = "%.2f" % dmdata["points"]
).add_field(
name = "Solved Problems",
value = str(dmdata["problem_count"])
).add_field(
name = "Contests",
value = str(len(dmdata["contests"]))
).add_field(
name = "Organizations",
value = ", ".join(org["short_name"] for org in dmoj_api("https://dmoj.ca/api/v2/organizations")["objects"] if org["id"] in dmdata["organizations"])
).add_field(
name = "Rank",
value = rank
).add_field(
name = "Rating",
value = str(rating)
))
@client.command("", ["osu", ("details", "summary")], "", "")
@client.command("External User Commands", ["osu", ("details", "summary"), "?"], "osu <details | summary> [player = me]", "report an osu player's public details or summary")
async def command_osu_details(command, message):
osu = await get_ext_user("osu", "an osu! player", command, message)
rv = requests.get(f"https://osu.ppy.sh/api/get_user?k={config['api-keys']['osu']}&u={osu}")
if rv.status_code == 200:
data = rv.json()
if data == []:
await send(message, "Could not find an osu! player by that username/ID!", reaction = "x")
else:
user = data[0]
if command[2] == "summary":
await send(message, embed = discord.Embed(title = f"osu! player details: {user['username']}", description = f"Level {user['level']}\nPP: {user['pp_raw']}\nRank: #{user['pp_rank']} (#{user['pp_country_rank']})\nAccuracy: {user['accuracy']}", color = client.color).set_thumbnail(url = f"http://s.ppy.sh/a/{user['user_id']}"))
else:
seconds = int(user["total_seconds_played"])
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
await send(message, embed = discord.Embed(
title = f"osu! player summary: {user['username']} #{user['user_id']}",
description = f"User since {user['join_date']}",
url = f"https://osu.ppy.sh/users/{user['user_id']}",
color = client.color
).add_field(
name = "Level",
value = user["level"]
).add_field(
name = "Accuracy",
value = user["accuracy"]
).add_field(
name = "Performance Points",
value = user["pp_raw"]
).add_field(
name = "Rank",
value = f"#{user['pp_rank']} (#{user['pp_country_rank']} in {pycountry.countries.get(alpha_2 = user['country']).name})"
).add_field(
name = "Score Counts",
value = " ".join(f"{user['count' + x]} {emoji('osu_' + x)}" for x in ["300", "100", "50"]),
inline = False
).add_field(
name = "Rating Counts",
value = " ".join(f"{user['count_rank_' + x.lower()]} {emoji('osu_' + x)}" for x in ["SSH", "SS", "SH", "S", "A"]),
inline = False
).add_field(
name = "Best Score",
value = user['ranked_score']
).add_field(
name = "Total Score",
value = user['total_score']
).add_field(
name = "Time Played",
value = f"{hours}:{str(minutes).zfill(2)}:{str(seconds).zfill(2)}"
).set_thumbnail(
url = f"http://s.ppy.sh/a/{user['user_id']}"
))
else:
await send(message, f"Failed to fetch from osu! API: status code {rv.status_code}!", reaction = "x")
def display_ow_rank(rating):
try:
rank = int(rating)
if rank < 1500:
e = "ow_bronze"
elif rank < 2000:
e = "ow_silver"
elif rank < 2500:
e = "ow_gold"
elif rank < 3000:
e = "ow_platinum"
elif rank < 3500:
e = "ow_diamond"
elif rank < 4000:
e = "ow_master"
else:
e = "ow_grandmaster"
return f"{rating} {emoji(e)}"
except:
return rating
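# Worked example: display_ow_rank(2750) falls in the 2500-2999 bracket and
# renders as "2750" plus the ow_platinum emoji; non-numeric ratings are
# returned unchanged.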
@client.command("", [("ow", "overwatch"), "summary"], "", "")
@client.command("External User Commands", [("ow", "overwatch"), "summary", "?"], "ow/overwatch summary <player = me>", "report an overwatch player's summary")
async def command_ow_summary(command, message):
ow = await get_ext_user("ow", "a Blizzard battletag", command, message)
try:
r = requests.get(f"https://ow-api.com/v1/stats/pc/us/{ow}/profile")
if r.status_code != 200:
raise RuntimeError("Status Code not 200")
data = r.json()
try:
await send(message, embed = discord.Embed(
title = f"Overwatch player summary: {data['name']}",
description = "",
color = client.color
).add_field(
name = "Level",
value = str(data["level"] + 100 * data["prestige"])
).add_field(
name = "Rating",
value = display_ow_rank(data["rating"])
).add_field(
name = "Games Won",
value = str(data["gamesWon"])
).add_field(
name = "Competitive Winrate",
value = "%.2f%%" % (data["competitiveStats"]["games"]["won"] / data["competitiveStats"]["games"]["played"] * 100) if "games" in data["competitiveStats"] else "N/A"
).set_thumbnail(
url = data["icon"]
))
except:
print(traceback.format_exc())
await send(message, "Failed to generate embed!", reaction = "x")
except:
await send(message, f"Failed to fetch user data for `{ow}` from Overwatch API; check the spelling of this battletag (please format as `name-number`)!", reaction = "x")
@client.command("", [("lol", "league"), ("report", "current", "report-player", "current-player")], "", "")
@client.command("League of Legends Commands", [("lol", "league"), ("report", "current", "report-player", "current-player"), "?"], "lol/league <report | current>[-player] [player = me]", "create a game report for the player")
async def command_lol_report(command, message):
sm = await get_ext_user("lol", "a League of Legends summoner", command, message)
try:
summoner = watcher.summoner.by_name(lol_region, sm)
if command[2] == "report" or command[2] == "report-player":
try:
game = watcher.match.matchlist_by_account(lol_region, summoner["accountId"], end_index = 1)["matches"][0]
try:
if command[2] == "report":
await send(message, embed = await lol_game_embed(message.guild, game["gameId"], sm, False), reaction = "check")
elif command[2] == "report-player":
await send(message, embed = await lol_player_embed(message.guild, game["gameId"], sm, False), reaction = "check")
except:
print(traceback.format_exc())
await send(message, "Failed to create embed!", reaction = "x")
except Exception as e:
await send(message, f"Could not find a game for {lol_region.upper()}/{sm}! The summoner may not have played a proper game recently enough.", reaction = "x")
else:
try:
game = watcher.spectator.by_summoner(lol_region, summoner["id"])
try:
if command[2] == "current":
await send(message, embed = await lol_current_embed(message.guild, game, sm))
elif command[2] == "current-player":
await send(message, embed = await lol_current_player_embed(message.guild, game, [sm]))
except:
print(traceback.format_exc())
await send(message, "Failed to create embed!", reaction = "x")
except Exception as e:
await send(message, f"Could not find current game for {lol_region.upper()}/{sm}! The summoner may not be in game.", reaction = "x")
except:
await send(message, f"Could not find summoner {lol_region.upper()}/{sm}! Please check your spelling.", reaction = "x")
@client.command("League of Legends Commands", [("lol", "league"), "rotation"], "lol/league rotation", "check the current free champion rotation")
async def command_lol_rotation(command, message):
champions = [champs[cid] for cid in watcher.champion.rotations(lol_region)["freeChampionIds"]]
champions.sort()
await send(message, f"This week's free rotation is: {english_list(champions)}.")
@client.command("League of Legends Commands", [("lol", "league"), "ranges", "..."], "lol/league ranges <champion> [champion...]", "compare ability ranges for champions")
async def command_lol_ranges(command, message):
champs = set()
for champ in command[3:]:
champ = champ.lower()
if champ not in cmap:
await send(message, f"{champ} is not a recognized champion name or ID!", reaction = "x")
break
champs.add(cmap[champ])
else:
items = []
for champ in champs:
data = requests.get(f"http://ddragon.leagueoflegends.com/cdn/{lol_version}/data/en_US/champion/{champ}.json").json()
items.append((data["data"][champ]["stats"]["attackrange"], data["data"][champ]["name"], "Basic Attack"))
for i, spell in enumerate(data["data"][champ]["spells"]):
ident = data["data"][champ]["name"] + " " + ("QWER"[i] if 0 <= i < 4 else "?")
if len(set(spell["range"])) == 1:
items.append((spell["range"][0], ident, spell["name"]))
else:
clusters = {}
for j, r in enumerate(spell["range"]):
if r not in clusters:
clusters[r] = []
clusters[r].append(j + 1)
for key in clusters:
items.append((key, ident, spell["name"] + " Rank " + "/".join(map(str, clusters[key]))))
items.sort()
stacked = []
for item in items:
if stacked == [] or item[0] != stacked[-1][0]:
stacked.append([item[0], []])
stacked[-1][1].append((item[1], item[2]))
info = "**Range Analysis**\n"
for rng, stack in stacked:
stack = ", ".join(f"{ident} ({name})" for ident, name in stack)
info += f"\n__{rng}__: {stack}"
await send(message, info, reaction = "check")
@client.command("League of Legends Commands", [("lol", "league"), "item", "?", "..."], "lol item <name>", "get details about an item")
async def command_lol_item(command, message):
item = find_item("".join(command[3:]).lower())
await send(message, embed = discord.Embed(
title = f"League of Legends Item: {item['name']} (#{item['id']})",
description = re.sub("(\\() (.)|(.) (\\))", "\\1\\2\\3\\4", re.sub(" +", " ", re.sub("<[^>]+?>", "", re.sub("<br>|<li>", "\n", item["description"])))),
color = client.color,
url = f"https://leagueoflegends.fandom.com/wiki/{item['name'].replace(' ', '_')}"
).add_field(
name = "Build Path",
value = build_path(item["id"]) + ("\n\nBuilds into: " + english_list(lolitems[key]["name"] for key in item.get("into")) if item.get("into") else "")
).add_field(
name = "Tags",
value = "\n".join("- " + {
"CriticalStrike": "Critical Strike",
"NonbootsMovement": "Movement Speed",
"SpellDamage": "Ability Power",
"MagicPenetration": "Magic Penetration",
"ArmorPenetration": "Armor Penetration",
"SpellBlock": "Magic Resistance",
"Slow": "Movement Reduction",
"Jungle": "Jungling",
"Health": "Health",
"Lane": "Laning",
"Aura": "Aura",
"HealthRegen": "Health Regeneration",
"SpellVamp": "Spell Vamp",
"GoldPer": "Gold Income",
"Mana": "Mana",
"Vision": "Vision",
"LifeSteal": "Physical Vamp",
"Consumable": "Consumable",
"Armor": "Armor",
"Stealth": "Stealth",
"ManaRegen": "Mana Regeneration",
"OnHit": "On-Hit",
"Active": "Active",
"CooldownReduction": "Cooldown Reduction",
"Trinket": "Trinket",
"AttackSpeed": "Attack Speed",
"Boots": "Boots",
"AbilityHaste": "Ability Haste",
"Tenacity": "Tenacity",
"Damage": "Attack Damage"
}[tag] for tag in item["tags"])
).set_thumbnail(
url = f"http://ddragon.leagueoflegends.com/cdn/{lol_version}/img/item/{item['id']}.png"
))
stats_length = 24
async def stats(channel, vis = None):
counts = {}
async for message in channel.history(limit = None):
if not vis or message.author.id in vis:
uinfo = f"{truncate(message.author.name, stats_length - 5)}#{message.author.discriminator}"
counts[uinfo] = counts.get(uinfo, 0) + 1
return sorted(counts.items(), key = lambda a: (-a[1], a[0]))
def truncate(string, length):
if len(string) > length:
return string[:length - 1] + "…"
return string
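# Worked example: truncate("abcdefgh", 5) -> "abcd…"; strings at or under the
# limit are returned unchanged.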
@client.command("Server Statistics Commands", [("channel", "server"), "stats"], "<channel | server> stats", "output the number of messages sent in each channel by each user")
async def command_channel_stats(command, message):
v = set(m.id for m in message.channel.members)
async with message.channel.typing():
if command[1] == "channel":
s = await stats(message.channel, v)
total = sum(b for _, b in s)
mc = len(str(max(b for _, b in s)))
l = max(len(a) for a, _ in s)
await send(message, embed = discord.Embed(
title = f"Channel Stats for #{message.channel.name}",
description = "```\n" + "\n".join(f"{uinfo.ljust(l)} {str(count).ljust(mc)} ({count / total * 100:.2f}%)" for uinfo, count in s) + "\n```",
color = client.color
))
else:
vis = set(message.channel.members)
counts = {}
ccount = {}
cname = {}
total = 0
failed = 0
for channel in message.guild.channels:
try:
if isinstance(channel, discord.TextChannel):
if set(channel.members) >= vis:
cname[channel.id] = channel.name
for uinfo, count in await stats(channel, v):
counts[uinfo] = counts.get(uinfo, 0) + count
ccount[channel.id] = ccount.get(channel.id, 0) + count
total += count
except:
failed += 1
mc = len(str(max(max(counts.values()), max(ccount.values()))))
ul = max(map(len, counts))
cl = max(map(len, cname.values()))
l = min(max(ul, cl), stats_length)
counts = sorted(counts.items(), key = lambda a: (-a[1], a[0]))
ccount = sorted(ccount.items(), key = lambda a: (-a[1], a[0]))
await send(message, embed = discord.Embed(
title = f"Server Stats for {message.guild.name}",
description = "```\n" + "\n".join(f"{uinfo.ljust(l)} {str(count).ljust(mc)} ({count / total * 100:.2f}%)" for uinfo, count in counts) +
"\n\n" + "\n".join(f"#{truncate(cname[cid].ljust(l - 1), stats_length - 1)} {str(count).ljust(mc)} ({count / total:.2f}%)" for cid, count in ccount) + "\n```",
color = client.color
))
if failed:
await send(message, f"Failed to index the results from {failed} channel{'s' * (failed != 1)}; likely this bot does not have permission to access them.")
@client.command("Miscellaneous Commands", ["blame"], "blame", "blame a random person in this channel (cannot blame any bots)")
async def command_blame(command, message):
members = []
for member in message.channel.members:
if not member.bot:
members.append(member)
await send(message, f"It was {random.choice(members).mention}'s fault!", allowed_mentions = discord.AllowedMentions.none())
@client.command("Miscellaneous Commands", ["spoiler", "image"], "spoiler image", "accept an image in a DM to spoiler (for mobile users)")
async def command_spoiler_image(command, message):
try:
await dm(message.author, f"The next image(s) you DM to me will be sent to {message.guild.name}#{message.channel.name} as a spoiler.")
await message.delete()
await set_data("dm_spoiler", message.author.id, message.channel.id)
except:
await send(message, "You need to allow me to DM you to use this feature!", reaction = "x")
@client.command("Miscellaneous Commands", ["color", "image"], "color image", "auto-color the next image you send in this channel with DeepAI")
async def command_color_image(command, message):
await send(message, f"The next image you send in this channel will be automatically colored with the power of Artificial Intelligence.")
await set_data("img_color", message.author.id, message.channel.id, 0)
async def nhentai(nhid, force = False):
if force or not await has_data("nhentai", nhid):
response = requests.get(f"https://nhentai.net/g/{nhid}")
if response.status_code == 404:
raise BotError("404 Not Found!")
elif response.status_code == 200:
t = response.text
urls = {x.replace("t.", "i.", 1).replace("t.", ".") for x in re.findall("https://t\\.nhentai\\.net/galleries/\\d+/\\d+t\\.\\w+", t)}
urls = sorted(urls, key = lambda s: [int(x) for x in re.findall("\\d+", s)])
title = re.findall("<span class=\"pretty\">\\s*(.+?)\\s*</span>", t)[0]
subtitle = re.findall("<span class=\"after\">\\s*(.+?)\\s*</span>", t)[0]
sauce = int(re.findall("\\d+", urls[0])[0])
await set_data("nhentai", nhid, (title, subtitle, sauce, urls))
return (title, subtitle, sauce, urls)
else:
raise BotError(f"Unknown error: {response.status_code}")
else:
return await get_data("nhentai", nhid)
@client.command("Genshin Commands", ["genshin", "info", "..."], "genshin info <item>", "get info on an item (must enter the internal ID; ask a developer if unsure but it's not too counterintuitive)")
async def command_genshin_info(command, message):
item = " ".join(command[3:]).lower()
await client.genshin_info(item, message.channel)
await message.add_reaction("✅")
async def resin_set(user, amt):
await set_data("genshin", "resin_info", user.id, time.time() - 8 * 60 * amt)
async def resin_rmd(user):
return await get_data("genshin", "resin_reminder", user.id, default = -1)
async def resin_amount(uid):
if await has_data("genshin", "resin_info", uid):
return min(160, (time.time() - await get_data("genshin", "resin_info", uid)) / 8 / 60)
else:
return -1
def hm(s):
h, m = divmod(int(s // 60), 60)
return str(h) + "h" + str(m).zfill(2) if h else str(m) + "m"
@client.command("Genshin Commands", ["genshin", "resin", "set", "?"], "genshin resin set <amount>", "tell me how much resin you currently have")
async def command_genshin_resin_set(command, message):
amt = int(command[4])
await resin_set(message.author, amt)
cur = await resin_rmd(message.author)
msg = await send(message, "Set your resin!" + ("" if cur == -1 else f" Your existing reminder, set for {cur} resin, will occur in {hm(8 * 60 * (cur - amt))}."))
if message.guild:
await message.delete(delay = 5)
await msg.delete(delay = 5)
@client.command("Genshin Commands", ["genshin", "resin", "now"], "genshin resin now", "check how much resin you currently have")
async def command_genshin_resin_now(command, message):
amt = await resin_amount(message.author.id)
cur = await resin_rmd(message.author)
if amt == -1:
await send(message, "You haven't told me how much resin you have yet!", reaction = "x")
else:
await send(message, f"You currently have {int(amt)} resin!" + ("" if cur == -1 else f" Your reminder, set for {cur} resin, will occur in {hm(8 * 60 * (cur - amt))}."))
@client.command("Genshin Commands", ["genshin", "resin", "reminder"], "genshin resin reminder [[amount] <desired = 160>] / stop", "set / stop a reminder for when you reach a specific amount of resin; your current amount is optional if you've already set your resin amount")
@client.command("", ["genshin", "resin", "reminder", "?"], "", "")
@client.command("", ["genshin", "resin", "reminder", "?", "?"], "", "")
async def command_genshin_resin_reminder(command, message):
if len(command) == 5 and command[4] == "stop":
msg = await send(message, "I will no longer remind you about your resin!")
await del_data("genshin", "resin_reminder", message.author.id)
else:
if len(command) <= 5:
if not await has_data("genshin", "resin_info", message.author.id):
raise BotError("You need to tell me how much resin you have with `genshin resin set` or specify the amount you currently have!")
des = int(command[4]) if len(command) == 5 else 160
amt = await resin_amount(message.author.id)
else:
amt = int(command[4])
await resin_set(message.author, amt)
des = int(command[5])
if des > 160:
raise BotError("You cannot have more than 160 resin without using Fragile Resin to exceed that cap manually!")
if des <= amt:
raise BotError("You already have that much resin!")
cur = await resin_rmd(message.author)
if cur == -1:
msg = await send(message, f"I will remind you when you reach {des} resin (in {hm(8 * 60 * (des - amt))})!")
else:
msg = await send(message, f"You previously had a reminder for when you reached {cur} resin; I will instead remind you when you reach {des} (in {hm(8 * 60 * (des - amt))})!")
await set_data("genshin", "resin_reminder", message.author.id, des)
if message.guild:
await message.delete(delay = 5)
await msg.delete(delay = 5)
@client.command("", [("nhentai", "fnhentai"), "?"], "", "")
async def command_nhentai(command, message):
nhid = int(command[2])
title, subtitle, sauce, urls = await nhentai(nhid, command[1] == "fnhentai")
reply = await send(message, embed = discord.Embed(title = title + " " + subtitle, url = f"https://nhentai.net/g/{nhid}", description = f"Page 1 / {len(urls)}").set_image(url = urls[0]))
await reply.add_reaction("⬅️")
await reply.add_reaction("➡️")
await set_data("nhentai_embed", reply.id, (nhid, 0))
import httpx
import img2pdf, os
from PIL import Image
from PyPDF3 import PdfFileMerger
from io import BytesIO
async def get_async(url):
async with httpx.AsyncClient() as client:
return await client.get(url)
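# Usage sketch: responses gathered concurrently below are httpx.Response
# objects, so r.content holds the raw body bytes that img2pdf.convert expects.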
@client.command("", ["nhdownload", "?"], "", "")
async def command_nhdownload(command, message):
async with message.channel.typing():
nhid = int(command[2])
title, subtitle, sauce, urls = await nhentai(nhid, True)
try:
os.mkdir(f"/tmp/{nhid}")
except:
pass
merger = PdfFileMerger()
responses = await asyncio.gather(*map(get_async, urls))
for page, r in enumerate(responses):
pdf_path = f"/tmp/{nhid}/{page}.pdf"
pdf_bytes = img2pdf.convert(r.content)
with open(pdf_path, "wb") as f:
f.write(pdf_bytes)
merger.append(pdf_path)
final_path = f"/tmp/{nhid}/final.pdf"
merger.write(final_path)
merger.close()
try:
with open(final_path, "rb") as f:
await send(message, file = discord.File(fp = f, filename = f"[{nhid}] {title}.pdf"))
except:
await send(message, f"The file is too large to upload; you can access it here: https://dev.hyper-neutrino.xyz/nh/{nhid}")
@client.command("", lambda m: True, "", "")
async def command_image_spoiler_reply(command, message):
if type(message.channel) == discord.DMChannel:
if len(message.attachments) > 0:
if await has_data("dm_spoiler", message.author.id):
await client.get_channel(await get_data("dm_spoiler", message.author.id)).send(files = [(await attachment.to_file(spoiler = True)) for attachment in message.attachments])
await del_data("dm_spoiler", message.author.id)
@client.command("", lambda m: True, "", "")
async def command_image_color_reply(command, message):
if len(message.attachments) > 0:
if await has_data("img_color", message.author.id, message.channel.id):
r = requests.post("https://api.deepai.org/api/colorizer", data = {"image": message.attachments[0].url}, headers = {"api-key": "551549c3-8d2c-426b-ae9f-9211b13e6f14"})
await send(message, r.json()["output_url"])
await del_data("img_color", message.author.id, message.channel.id)
@client.command("", ["echo", "..."], "echo <message>", "echo the message")
async def command_echo(command, message):
await send(message, message.content[message.content.find("echo") + 4:])
@client.command("", ["say", "..."], "say <message>", "echo, then immediately delete the command")
async def command_say(command, message):
await send(message, message.content[message.content.find("say") + 3:])
await message.delete()
@client.command("", ["eval", "?", "..."], "eval <expr>", "evaluate a Python expression in a command function's scope")
async def command_eval(command, message):
if message.author.id not in config["sudo"]:
await send(message, "You must be a sudo user to do that!", reaction = "x")
else:
try:
code = message.content[message.content.find("eval") + 4:].strip()
if code.startswith("```python"):
code = code[9:]
elif code.startswith("```py"):
code = code[5:]
code = code.strip("`")
await send(message, str(eval(code))[:2000])
except:
await send(message, "Error evaluating expression!", reaction = "x")
@client.command("", ["exec", "?", "..."], "exec <code>", "execute Python code in a command function's scope (print is replaced with message output)")
async def command_exec(command, message):
if message.author.id not in config["sudo"]:
await send(message, "You must be a sudo user to do that!", reaction = "x")
else:
try:
code = message.content[message.content.find("exec") + 4:].strip()
if code.startswith("```python"):
code = code[9:]
elif code.startswith("```py"):
code = code[5:]
code = code.strip("`")
output = []
def print(*items, end = "\n", sep = " "):
output.extend(list(sep.join(map(str, items)) + end))
exec(code)
await send(message, "```python\n" + "".join(output[:1980]) + "\n```")
except:
await send(message, "Error executing expression!", reaction = "x")
@client.command("", ["adjust", "ehecd", "?"], "adjust ehecd <x>", "adjust the cooldown of ehe te nandayo")
async def command_adjust_ehecd(command, message):
if message.author.id not in config["sudo"]:
await send(message, "You must be a sudo user to do that!", reaction = "x")
else:
try:
await set_data("ehecd", int(command[3]))
await send(message, f"Cooldown of 'ehe te nandayo' is now {command[3]} second{'s' * (command[3] != '1')}!")
except:
await send(message, "Error; make sure you entered an integer!", reaction = "x")
@client.command("", ["data", "..."], "data", "fetch data from the bot")
async def command_data(command, message):
if message.author.id not in config["sudo"]:
await send(message, "You must be a sudo user to do that!")
else:
await send(message, "```python\n" + str(await get_data(*map(eval, command[2:]), default = None, set_if_missing = False))[:1980] + "\n```")
@client.command("", ["identify", "?"], "identify <user>", "identify a user")
async def command_identify(command, message):
member = await get_member(message.guild, command[2], message.author)
await send(message, f"Identified {member.name}#{member.discriminator}, a.k.a {member.display_name}, I.D. {member.id} ({member.mention})", allowed_mentions = discord.AllowedMentions.none())
@client.command("", ["emoji", "?", "-"], "", "")
@client.command("", ["emoji", "?"], "emoji <lookup> [-]", "post an emoji by lookup ID")
async def command_emoji(command, message):
try:
await send(message, str(emoji(command[2])))
if len(command) == 4:
await message.delete()
except:
await send(message, "That resulted in an error.", reaction = "x")
raise
@client.command("", [("summary", "summarize"), "?"], "", "")
@client.command("", [("summary", "summarize"), "?", "?"], "", "")
@client.command("", [("summary", "summarize"), "?", "?", "?"], "", "")
async def command_summarize(command, message):
url = command[2]
if url[0] == "<" and url[-1] == ">":
url = url[1:-1]
await message.edit(suppress = True)
rurl = f"https://api.smmry.com/?SM_API_KEY={config['api-keys']['sm']}"
if len(command) >= 4:
rurl += "&SM_LENGTH=" + command[3]
if len(command) >= 5:
rurl += "&SM_KEYWORD_COUNT=" + command[4]
rurl += "&SM_URL=" + url
r = requests.get(rurl)
data = r.json()
if "sm_api_error" in data:
error = data["sm_api_error"]
if error == 0:
await send(message, "Internal server problem with the SMMRY API; this is not your fault. Try again later.", reaction = "x")
elif error == 1:
await send(message, "Parameters are invalid. Check that you entered a real URL; otherwise, contact a developer.", reaction = "x")
elif error == 2:
await send(message, "This request has intentionally been restricted. Perhaps you have expended the API key's limit (100 per day).", reaction = "x")
elif error == 3:
await send(message, "Summarization error. This website might not be summarizable.")
else:
await send(message, (f"**{data['sm_api_title'].strip() or '(no title)'}**\n\n{data['sm_api_content'].strip() or '(no content)'}")[:2000])
if "sm_api_keyword_array" in data:
await message.channel.send(f"**Keywords**: {', '.join(data['sm_api_keyword_array'])}")
@client.command("", ["tsr", "?"], "", "")
async def command_toggle_suppress_reacts(command, message):
member = await get_member(message.guild, command[2], message.author)
await mod_data("tsr", lambda x: x ^ {member.id}, default = set())
await message.add_reaction("✅")
@client.command("", ["react", "..."], "", "")
async def command_react(command, message):
if not message.reference or not message.reference.resolved:
raise BotError("You need to refer to a message via reply!")
fails = []
for x in command[2:]:
try:
await message.reference.resolved.add_reaction(emoji(x))
except:
fails.append(x)
if fails:
await send(message, "The following emojis do not exist / could not have been added: " + ", ".join(fails))
else:
await message.delete()
# @client.command("", re.compile(r"\b[hH]?[eE][hH][eE]\b").search, "", "")
async def command_ehe_te_nandayo(command, message):
if message.author != client.user and time.time() - await get_data("ehe", message.author.id, default = 0) > (await get_data("ehecd", default = 30)):
await send(message, "**ehe te nandayo!?**", reaction = "?")
await set_data("ehe", message.author.id, time.time())
# @client.command("", re.compile(r"\[\w+\]").search, "", "")
async def command_emoji_react(command, message):
for c in re.findall(r"\[(\w+)\]", message.content):
try:
await message.add_reaction(emoji(c))
except:
pass
# @client.command("", re.compile(r"\b[Aa][Oo][Cc]\b").search, "", "")
async def command_aoc(command, message):
await message.channel.send("Alexandria Ocasio-Cortez")
# @client.command("", ["toggle69"], "", "")
async def command_toggle69(command, message):
await set_data("disable_69", not await get_data("disable_69", default = False))
await message.add_reaction("✅")
# @client.command("", re.compile(r"\b69\b").search, "", "")
async def command_69(command, message):
if await get_data("disable_69", default = False):
return
await message.reply("nice", mention_author = False) | it anagram_function(message, hint = True)
@ |
exceptions.py | #!/usr/bin/env python
# __author__ = "Ronie Martinez"
# __copyright__ = "Copyright 2020, Ronie Martinez"
# __credits__ = ["Ronie Martinez"]
# __maintainer__ = "Ronie Martinez"
# __email__ = "[email protected]"
class CountryCodeNotFound(Exception):
pass
class CountryNotFound(Exception):
pass
mod.rs | mod default;
mod deserializer;
pub use self::default::Options;
pub use self::deserializer::from_env;
pub mod common;
#[cfg(test)]
mod tests {
use std::path::PathBuf;
use super::*;
use serde::Deserialize;
#[derive(Debug, Deserialize, PartialEq)]
#[serde(untagged)]
#[serde(field_identifier, rename_all = "lowercase")]
enum Debug {
All,
LLB,
Frontend,
}
#[derive(Debug, Deserialize, PartialEq)]
#[serde(rename_all = "kebab-case")]
struct CustomOptions {
filename: Option<PathBuf>,
verbosity: u32,
#[serde(default)]
debug: Vec<Debug>,
#[serde(default)]
cache_imports: Vec<common::CacheOptionsEntry>,
}
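// Sketch of the expected encoding (a reading of the tests below, not a spec):
// each BUILDKIT_FRONTEND_OPT_<n> variable carries one "key=value" pair, e.g.
// BUILDKIT_FRONTEND_OPT_0="filename=/path/to/Dockerfile", and `from_env`
// deserializes the collected pairs into this struct.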
#[test]
fn custom_options() {
let env = vec![
(
"BUILDKIT_FRONTEND_OPT_0".into(),
"filename=/path/to/Dockerfile".into(),
),
(
"BUILDKIT_FRONTEND_OPT_1".into(),
"debug=llb,frontend".into(),
),
(
"BUILDKIT_FRONTEND_OPT_2".into(),
r#"cache-imports=[{"Type":"local","Attrs":{"src":"cache"}}]"#.into(),
),
(
"BUILDKIT_FRONTEND_OPT_3".into(),
"verbosity=12345678".into(),
),
];
assert_eq!(
from_env::<CustomOptions, _>(env.into_iter()).unwrap(),
CustomOptions {
filename: Some(PathBuf::from("/path/to/Dockerfile")),
verbosity: 12_345_678,
debug: vec![Debug::LLB, Debug::Frontend],
cache_imports: vec![common::CacheOptionsEntry {
cache_type: common::CacheType::Local,
attrs: vec![("src".into(), "cache".into())].into_iter().collect()
}],
}
);
}
#[test]
fn env_variable_names() {
let env = vec![
(
"ANOTHER_OPT_0".into(),
"filename=/path/to/Dockerfile".into(),
),
(
"ANOTHER_OPT_2".into(), | "BUILDKIT_FRONTEND_OPT_2".into(),
"verbosity=12345678".into(),
),
];
assert_eq!(
from_env::<CustomOptions, _>(env.into_iter()).unwrap(),
CustomOptions {
filename: None,
verbosity: 12_345_678,
debug: vec![Debug::All],
cache_imports: vec![],
}
);
}
#[test]
fn empty_cache() {
let env = vec![
("BUILDKIT_FRONTEND_OPT_1".into(), "cache-imports=".into()),
(
"BUILDKIT_FRONTEND_OPT_2".into(),
"verbosity=12345678".into(),
),
];
assert_eq!(
from_env::<CustomOptions, _>(env.into_iter()).unwrap(),
CustomOptions {
filename: None,
verbosity: 12_345_678,
debug: vec![],
cache_imports: vec![],
}
);
}
} | r#"cache-imports=[{"Type":"local","Attrs":{"src":"cache"}}]"#.into(),
),
("BUILDKIT_FRONTEND_OPT_1".into(), "debug=all".into()),
( |
server_test.go | package worker_test
import (
"context"
"encoding/json"
"fmt"
"net/http"
"testing"
"time"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/osbuild/osbuild-composer/internal/distro"
"github.com/osbuild/osbuild-composer/internal/distro/test_distro"
"github.com/osbuild/osbuild-composer/internal/jobqueue"
"github.com/osbuild/osbuild-composer/internal/jobqueue/fsjobqueue"
"github.com/osbuild/osbuild-composer/internal/osbuild2"
"github.com/osbuild/osbuild-composer/internal/rpmmd"
"github.com/osbuild/osbuild-composer/internal/test"
"github.com/osbuild/osbuild-composer/internal/worker"
"github.com/osbuild/osbuild-composer/internal/worker/clienterrors"
)
func newTestServer(t *testing.T, tempdir string, jobRequestTimeout time.Duration, basePath string) *worker.Server {
q, err := fsjobqueue.New(tempdir)
if err != nil {
t.Fatalf("error creating fsjobqueue: %v", err)
}
config := worker.Config{
RequestJobTimeout: jobRequestTimeout,
BasePath: basePath,
}
return worker.NewServer(nil, q, config)
}
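// Usage sketch: each test builds an isolated server backed by a fresh
// fsjobqueue under t.TempDir(), e.g.
//
//	srv := newTestServer(t, t.TempDir(), time.Duration(0), "/api/worker/v1")
//	handler := srv.Handler()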
// Ensure that the status request returns OK.
func TestStatus(t *testing.T) {
server := newTestServer(t, t.TempDir(), time.Duration(0), "/api/worker/v1")
handler := server.Handler()
test.TestRoute(t, handler, false, "GET", "/api/worker/v1/status", ``, http.StatusOK, `{"status":"OK", "href": "/api/worker/v1/status", "kind":"Status"}`, "message", "id")
}
func TestErrors(t *testing.T) {
var cases = []struct {
Method string
Path string
Body string
ExpectedStatus int
}{
// Bogus path
{"GET", "/api/worker/v1/foo", ``, http.StatusNotFound},
// Create job with invalid body
{"POST", "/api/worker/v1/jobs", ``, http.StatusBadRequest},
// Wrong method
{"GET", "/api/worker/v1/jobs", ``, http.StatusMethodNotAllowed},
// Update job with invalid ID
{"PATCH", "/api/worker/v1/jobs/foo", `{"status":"FINISHED"}`, http.StatusBadRequest},
// Update job that does not exist, with invalid body
{"PATCH", "/api/worker/v1/jobs/aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", ``, http.StatusBadRequest},
// Update job that does not exist
{"PATCH", "/api/worker/v1/jobs/aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", `{"status":"FINISHED"}`, http.StatusNotFound},
}
tempdir := t.TempDir()
for _, c := range cases {
server := newTestServer(t, tempdir, time.Duration(0), "/api/worker/v1")
handler := server.Handler()
test.TestRoute(t, handler, false, c.Method, c.Path, c.Body, c.ExpectedStatus, `{"kind":"Error"}`, "message", "href", "operation_id", "reason", "id", "code")
}
}
func TestErrorsAlteredBasePath(t *testing.T) {
var cases = []struct {
Method string
Path string
Body string
ExpectedStatus int
}{
// Bogus path
{"GET", "/api/image-builder-worker/v1/foo", ``, http.StatusNotFound},
// Create job with invalid body
{"POST", "/api/image-builder-worker/v1/jobs", ``, http.StatusBadRequest},
// Wrong method
{"GET", "/api/image-builder-worker/v1/jobs", ``, http.StatusMethodNotAllowed},
// Update job with invalid ID
{"PATCH", "/api/image-builder-worker/v1/jobs/foo", `{"status":"FINISHED"}`, http.StatusBadRequest},
// Update job that does not exist, with invalid body
{"PATCH", "/api/image-builder-worker/v1/jobs/aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", ``, http.StatusBadRequest},
// Update job that does not exist
{"PATCH", "/api/image-builder-worker/v1/jobs/aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", `{"status":"FINISHED"}`, http.StatusNotFound},
}
tempdir := t.TempDir()
for _, c := range cases {
server := newTestServer(t, tempdir, time.Duration(0), "/api/image-builder-worker/v1")
handler := server.Handler()
test.TestRoute(t, handler, false, c.Method, c.Path, c.Body, c.ExpectedStatus, `{"kind":"Error"}`, "message", "href", "operation_id", "reason", "id", "code")
}
}
func TestCreate(t *testing.T) {
distroStruct := test_distro.New()
arch, err := distroStruct.GetArch(test_distro.TestArchName)
if err != nil {
t.Fatalf("error getting arch from distro: %v", err)
}
imageType, err := arch.GetImageType(test_distro.TestImageTypeName)
if err != nil {
t.Fatalf("error getting image type from arch: %v", err)
}
manifest, err := imageType.Manifest(nil, distro.ImageOptions{Size: imageType.Size(0)}, nil, nil, 0)
if err != nil {
t.Fatalf("error creating osbuild manifest: %v", err)
}
server := newTestServer(t, t.TempDir(), time.Duration(0), "/api/worker/v1")
handler := server.Handler()
_, err = server.EnqueueOSBuild(arch.Name(), &worker.OSBuildJob{Manifest: manifest}, "")
require.NoError(t, err)
test.TestRoute(t, handler, false, "POST", "/api/worker/v1/jobs",
fmt.Sprintf(`{"types":["osbuild"],"arch":"%s"}`, test_distro.TestArchName), http.StatusCreated,
`{"kind":"RequestJob","href":"/api/worker/v1/jobs","type":"osbuild","args":{"manifest":{"pipeline":{},"sources":{}}}}`, "id", "location", "artifact_location")
}
func TestCancel(t *testing.T) {
distroStruct := test_distro.New()
arch, err := distroStruct.GetArch(test_distro.TestArchName)
if err != nil {
t.Fatalf("error getting arch from distro: %v", err)
}
imageType, err := arch.GetImageType(test_distro.TestImageTypeName)
if err != nil {
t.Fatalf("error getting image type from arch: %v", err)
}
manifest, err := imageType.Manifest(nil, distro.ImageOptions{Size: imageType.Size(0)}, nil, nil, 0)
if err != nil {
t.Fatalf("error creating osbuild manifest: %v", err)
}
server := newTestServer(t, t.TempDir(), time.Duration(0), "/api/worker/v1")
handler := server.Handler()
jobId, err := server.EnqueueOSBuild(arch.Name(), &worker.OSBuildJob{Manifest: manifest}, "")
require.NoError(t, err)
j, token, typ, args, dynamicArgs, err := server.RequestJob(context.Background(), arch.Name(), []string{"osbuild"}, []string{""})
require.NoError(t, err)
require.Equal(t, jobId, j)
require.Equal(t, "osbuild", typ)
require.NotNil(t, args)
require.Nil(t, dynamicArgs)
test.TestRoute(t, handler, false, "GET", fmt.Sprintf("/api/worker/v1/jobs/%s", token), `{}`, http.StatusOK,
fmt.Sprintf(`{"canceled":false,"href":"/api/worker/v1/jobs/%s","id":"%s","kind":"JobStatus"}`, token, token))
err = server.Cancel(jobId)
require.NoError(t, err)
test.TestRoute(t, handler, false, "GET", fmt.Sprintf("/api/worker/v1/jobs/%s", token), `{}`, http.StatusOK,
fmt.Sprintf(`{"canceled":true,"href":"/api/worker/v1/jobs/%s","id":"%s","kind":"JobStatus"}`, token, token))
}
func TestUpdate(t *testing.T) {
distroStruct := test_distro.New()
arch, err := distroStruct.GetArch(test_distro.TestArchName)
if err != nil {
t.Fatalf("error getting arch from distro: %v", err)
}
imageType, err := arch.GetImageType(test_distro.TestImageTypeName)
if err != nil {
t.Fatalf("error getting image type from arch: %v", err)
}
manifest, err := imageType.Manifest(nil, distro.ImageOptions{Size: imageType.Size(0)}, nil, nil, 0)
if err != nil {
t.Fatalf("error creating osbuild manifest: %v", err)
}
server := newTestServer(t, t.TempDir(), time.Duration(0), "/api/worker/v1")
handler := server.Handler()
jobId, err := server.EnqueueOSBuild(arch.Name(), &worker.OSBuildJob{Manifest: manifest}, "")
require.NoError(t, err)
j, token, typ, args, dynamicArgs, err := server.RequestJob(context.Background(), arch.Name(), []string{"osbuild"}, []string{""})
require.NoError(t, err)
require.Equal(t, jobId, j)
require.Equal(t, "osbuild", typ)
require.NotNil(t, args)
require.Nil(t, dynamicArgs)
test.TestRoute(t, handler, false, "PATCH", fmt.Sprintf("/api/worker/v1/jobs/%s", token), `{}`, http.StatusOK,
fmt.Sprintf(`{"href":"/api/worker/v1/jobs/%s","id":"%s","kind":"UpdateJobResponse"}`, token, token))
test.TestRoute(t, handler, false, "PATCH", fmt.Sprintf("/api/worker/v1/jobs/%s", token), `{}`, http.StatusNotFound,
`{"href":"/api/worker/v1/errors/5","code":"IMAGE-BUILDER-WORKER-5","id":"5","kind":"Error","message":"Token not found","reason":"Token not found"}`,
"operation_id")
}
func TestArgs(t *testing.T) {
distroStruct := test_distro.New()
arch, err := distroStruct.GetArch(test_distro.TestArchName)
require.NoError(t, err)
imageType, err := arch.GetImageType(test_distro.TestImageTypeName)
require.NoError(t, err)
manifest, err := imageType.Manifest(nil, distro.ImageOptions{Size: imageType.Size(0)}, nil, nil, 0)
require.NoError(t, err)
server := newTestServer(t, t.TempDir(), time.Duration(0), "/api/worker/v1")
job := worker.OSBuildJob{
Manifest: manifest,
ImageName: "test-image",
PipelineNames: &worker.PipelineNames{
Build: []string{"b"},
Payload: []string{"x", "y", "z"},
},
}
jobId, err := server.EnqueueOSBuild(arch.Name(), &job, "")
require.NoError(t, err)
_, _, _, args, _, err := server.RequestJob(context.Background(), arch.Name(), []string{"osbuild"}, []string{""})
require.NoError(t, err)
require.NotNil(t, args)
var jobArgs worker.OSBuildJob
err = server.OSBuildJob(jobId, &jobArgs)
require.NoError(t, err)
require.Equal(t, job, jobArgs)
}
func TestUpload(t *testing.T) {
distroStruct := test_distro.New()
arch, err := distroStruct.GetArch(test_distro.TestArchName)
if err != nil {
t.Fatalf("error getting arch from distro: %v", err)
}
imageType, err := arch.GetImageType(test_distro.TestImageTypeName)
if err != nil {
t.Fatalf("error getting image type from arch: %v", err)
}
manifest, err := imageType.Manifest(nil, distro.ImageOptions{Size: imageType.Size(0)}, nil, nil, 0)
if err != nil {
t.Fatalf("error creating osbuild manifest: %v", err)
}
server := newTestServer(t, t.TempDir(), time.Duration(0), "/api/worker/v1")
handler := server.Handler()
jobID, err := server.EnqueueOSBuild(arch.Name(), &worker.OSBuildJob{Manifest: manifest}, "")
require.NoError(t, err)
j, token, typ, args, dynamicArgs, err := server.RequestJob(context.Background(), arch.Name(), []string{"osbuild"}, []string{""})
require.NoError(t, err)
require.Equal(t, jobID, j)
require.Equal(t, "osbuild", typ)
require.NotNil(t, args)
require.Nil(t, dynamicArgs)
test.TestRoute(t, handler, false, "PUT", fmt.Sprintf("/api/worker/v1/jobs/%s/artifacts/foobar", token), `this is my artifact`, http.StatusOK, `?`)
}
func TestUploadAlteredBasePath(t *testing.T) {
distroStruct := test_distro.New()
arch, err := distroStruct.GetArch(test_distro.TestArchName)
if err != nil {
t.Fatalf("error getting arch from distro: %v", err)
}
imageType, err := arch.GetImageType(test_distro.TestImageTypeName)
if err != nil {
t.Fatalf("error getting image type from arch: %v", err)
}
manifest, err := imageType.Manifest(nil, distro.ImageOptions{Size: imageType.Size(0)}, nil, nil, 0)
if err != nil {
t.Fatalf("error creating osbuild manifest: %v", err)
}
server := newTestServer(t, t.TempDir(), time.Duration(0), "/api/image-builder-worker/v1")
handler := server.Handler()
jobID, err := server.EnqueueOSBuild(arch.Name(), &worker.OSBuildJob{Manifest: manifest}, "")
require.NoError(t, err)
j, token, typ, args, dynamicArgs, err := server.RequestJob(context.Background(), arch.Name(), []string{"osbuild"}, []string{""})
require.NoError(t, err)
require.Equal(t, jobID, j)
require.Equal(t, "osbuild", typ)
require.NotNil(t, args)
require.Nil(t, dynamicArgs)
test.TestRoute(t, handler, false, "PUT", fmt.Sprintf("/api/image-builder-worker/v1/jobs/%s/artifacts/foobar", token), `this is my artifact`, http.StatusOK, `?`)
}
func TestTimeout(t *testing.T) {
distroStruct := test_distro.New()
arch, err := distroStruct.GetArch(test_distro.TestArchName)
if err != nil {
t.Fatalf("error getting arch from distro: %v", err)
}
server := newTestServer(t, t.TempDir(), time.Millisecond*10, "/api/image-builder-worker/v1")
_, _, _, _, _, err = server.RequestJob(context.Background(), arch.Name(), []string{"osbuild"}, []string{""})
require.Equal(t, jobqueue.ErrDequeueTimeout, err)
test.TestRoute(t, server.Handler(), false, "POST", "/api/image-builder-worker/v1/jobs", `{"arch":"arch","types":["types"]}`, http.StatusNoContent,
`{"href":"/api/image-builder-worker/v1/jobs","id":"00000000-0000-0000-0000-000000000000","kind":"RequestJob"}`)
}
func TestRequestJobById(t *testing.T) {
distroStruct := test_distro.New()
arch, err := distroStruct.GetArch(test_distro.TestArchName)
if err != nil {
t.Fatalf("error getting arch from distro: %v", err)
}
server := newTestServer(t, t.TempDir(), time.Duration(0), "/api/worker/v1")
handler := server.Handler()
depsolveJobId, err := server.EnqueueDepsolve(&worker.DepsolveJob{}, "")
require.NoError(t, err)
jobId, err := server.EnqueueManifestJobByID(&worker.ManifestJobByID{}, depsolveJobId, "")
require.NoError(t, err)
test.TestRoute(t, server.Handler(), false, "POST", "/api/worker/v1/jobs", `{"arch":"arch","types":["manifest-id-only"]}`, http.StatusBadRequest,
`{"href":"/api/worker/v1/errors/15","kind":"Error","id": "15","code":"IMAGE-BUILDER-WORKER-15"}`, "operation_id", "reason", "message")
_, _, _, _, _, err = server.RequestJobById(context.Background(), arch.Name(), jobId)
	require.Equal(t, jobqueue.ErrNotPending, err)
_, token, _, _, _, err := server.RequestJob(context.Background(), arch.Name(), []string{"depsolve"}, []string{""})
require.NoError(t, err)
depsolveJR, err := json.Marshal(worker.DepsolveJobResult{})
require.NoError(t, err)
err = server.FinishJob(token, depsolveJR)
require.NoError(t, err)
j, token, typ, args, dynamicArgs, err := server.RequestJobById(context.Background(), arch.Name(), jobId)
require.NoError(t, err)
require.Equal(t, jobId, j)
require.Equal(t, "manifest-id-only", typ)
require.NotNil(t, args)
require.NotNil(t, dynamicArgs)
test.TestRoute(t, handler, false, "GET", fmt.Sprintf("/api/worker/v1/jobs/%s", token), `{}`, http.StatusOK,
fmt.Sprintf(`{"canceled":false,"href":"/api/worker/v1/jobs/%s","id":"%s","kind":"JobStatus"}`, token, token))
}
// Enqueue OSBuild jobs with and without additional data and read them off the queue to
// check if the fallbacks are added for the old job and the new data are kept
// for the new job.
func TestMixedOSBuildJob(t *testing.T) {
require := require.New(t)
emptyManifestV2 := distro.Manifest(`{"version":"2","pipelines":{}}`)
server := newTestServer(t, t.TempDir(), time.Millisecond*10, "/")
fbPipelines := &worker.PipelineNames{Build: distro.BuildPipelinesFallback(), Payload: distro.PayloadPipelinesFallback()}
oldJob := worker.OSBuildJob{
Manifest: emptyManifestV2,
ImageName: "no-pipeline-names",
}
oldJobID, err := server.EnqueueOSBuild("x", &oldJob, "")
require.NoError(err)
newJob := worker.OSBuildJob{
Manifest: emptyManifestV2,
ImageName: "with-pipeline-names",
PipelineNames: &worker.PipelineNames{
Build: []string{"build"},
Payload: []string{"other", "pipelines"},
},
}
newJobID, err := server.EnqueueOSBuild("x", &newJob, "")
require.NoError(err)
var oldJobRead worker.OSBuildJob
err = server.OSBuildJob(oldJobID, &oldJobRead)
require.NoError(err)
require.NotNil(oldJobRead.PipelineNames)
// OldJob gets default pipeline names when read
require.Equal(fbPipelines, oldJobRead.PipelineNames)
require.Equal(oldJob.Manifest, oldJobRead.Manifest)
require.Equal(oldJob.ImageName, oldJobRead.ImageName)
// Not entirely equal
require.NotEqual(oldJob, oldJobRead)
// NewJob the same when read back
var newJobRead worker.OSBuildJob
err = server.OSBuildJob(newJobID, &newJobRead)
require.NoError(err)
require.NotNil(newJobRead.PipelineNames)
require.Equal(newJob.PipelineNames, newJobRead.PipelineNames)
// Dequeue the jobs (via RequestJob) to get their tokens and update them to
// test the result retrieval
getJob := func() (uuid.UUID, uuid.UUID) {
// don't block forever if the jobs weren't added or can't be retrieved
ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond)
defer cancel()
id, token, _, _, _, err := server.RequestJob(ctx, "x", []string{"osbuild"}, []string{""})
require.NoError(err)
return id, token
}
getJobTokens := func(n uint) map[uuid.UUID]uuid.UUID {
tokens := make(map[uuid.UUID]uuid.UUID, n)
for idx := uint(0); idx < n; idx++ {
id, token := getJob()
tokens[id] = token
}
return tokens
}
jobTokens := getJobTokens(2)
// make sure we got them both as expected
require.Contains(jobTokens, oldJobID)
require.Contains(jobTokens, newJobID)
oldJobResult := &worker.OSBuildJobResult{
Success: true,
OSBuildOutput: &osbuild2.Result{
Type: "result",
Success: true,
Log: map[string]osbuild2.PipelineResult{
"build-old": {
osbuild2.StageResult{
ID: "---",
Type: "org.osbuild.test",
Output: "<test output>",
Success: true,
}, | },
},
}
oldJobResultRaw, err := json.Marshal(oldJobResult)
require.NoError(err)
oldJobToken := jobTokens[oldJobID]
err = server.FinishJob(oldJobToken, oldJobResultRaw)
require.NoError(err)
oldJobResultRead := new(worker.OSBuildJobResult)
_, _, err = server.OSBuildJobStatus(oldJobID, oldJobResultRead)
require.NoError(err)
// oldJobResultRead should have PipelineNames now
require.NotEqual(oldJobResult, oldJobResultRead)
require.Equal(fbPipelines, oldJobResultRead.PipelineNames)
require.NotNil(oldJobResultRead.PipelineNames)
require.Equal(oldJobResult.OSBuildOutput, oldJobResultRead.OSBuildOutput)
require.Equal(oldJobResult.Success, oldJobResultRead.Success)
newJobResult := &worker.OSBuildJobResult{
Success: true,
PipelineNames: &worker.PipelineNames{
Build: []string{"build-result"},
Payload: []string{"result-test-payload", "result-test-assembler"},
},
OSBuildOutput: &osbuild2.Result{
Type: "result",
Success: true,
Log: map[string]osbuild2.PipelineResult{
"build-new": {
osbuild2.StageResult{
ID: "---",
Type: "org.osbuild.test",
Output: "<test output new>",
Success: true,
},
},
},
},
}
newJobResultRaw, err := json.Marshal(newJobResult)
require.NoError(err)
newJobToken := jobTokens[newJobID]
err = server.FinishJob(newJobToken, newJobResultRaw)
require.NoError(err)
newJobResultRead := new(worker.OSBuildJobResult)
_, _, err = server.OSBuildJobStatus(newJobID, newJobResultRead)
require.NoError(err)
require.Equal(newJobResult, newJobResultRead)
}
// Enqueue Koji jobs with and without additional data and read them off the queue to
// check if the fallbacks are added for the old job and the new data are kept
// for the new job.
func TestMixedOSBuildKojiJob(t *testing.T) {
require := require.New(t)
emptyManifestV2 := distro.Manifest(`{"version":"2","pipelines":{}}`)
server := newTestServer(t, t.TempDir(), time.Duration(0), "/api/worker/v1")
fbPipelines := &worker.PipelineNames{Build: distro.BuildPipelinesFallback(), Payload: distro.PayloadPipelinesFallback()}
enqueueKojiJob := func(job *worker.OSBuildKojiJob) uuid.UUID {
initJob := new(worker.KojiInitJob)
initJobID, err := server.EnqueueKojiInit(initJob, "")
require.NoError(err)
jobID, err := server.EnqueueOSBuildKoji("k", job, initJobID, "")
require.NoError(err)
return jobID
}
oldJob := worker.OSBuildKojiJob{
Manifest: emptyManifestV2,
ImageName: "no-pipeline-names",
}
oldJobID := enqueueKojiJob(&oldJob)
newJob := worker.OSBuildKojiJob{
Manifest: emptyManifestV2,
ImageName: "with-pipeline-names",
PipelineNames: &worker.PipelineNames{
Build: []string{"build"},
Payload: []string{"other", "pipelines"},
},
}
newJobID := enqueueKojiJob(&newJob)
var oldJobRead worker.OSBuildKojiJob
err := server.OSBuildKojiJob(oldJobID, &oldJobRead)
require.NoError(err)
require.NotNil(oldJobRead.PipelineNames)
// OldJob gets default pipeline names when read
require.Equal(fbPipelines, oldJobRead.PipelineNames)
require.Equal(oldJob.Manifest, oldJobRead.Manifest)
require.Equal(oldJob.ImageName, oldJobRead.ImageName)
// Not entirely equal
require.NotEqual(oldJob, oldJobRead)
// NewJob the same when read back
var newJobRead worker.OSBuildKojiJob
err = server.OSBuildKojiJob(newJobID, &newJobRead)
require.NoError(err)
require.NotNil(newJobRead.PipelineNames)
require.Equal(newJob.PipelineNames, newJobRead.PipelineNames)
// Dequeue the jobs (via RequestJob) to get their tokens and update them to
// test the result retrieval
// Finish init jobs
for idx := uint(0); idx < 2; idx++ {
ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond)
defer cancel()
_, token, _, _, _, err := server.RequestJob(ctx, "k", []string{"koji-init"}, []string{""})
require.NoError(err)
require.NoError(server.FinishJob(token, nil))
}
getJob := func() (uuid.UUID, uuid.UUID) {
// don't block forever if the jobs weren't added or can't be retrieved
ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond)
defer cancel()
id, token, _, _, _, err := server.RequestJob(ctx, "k", []string{"osbuild-koji"}, []string{""})
require.NoError(err)
return id, token
}
getJobTokens := func(n uint) map[uuid.UUID]uuid.UUID {
tokens := make(map[uuid.UUID]uuid.UUID, n)
for idx := uint(0); idx < n; idx++ {
id, token := getJob()
tokens[id] = token
}
return tokens
}
jobTokens := getJobTokens(2)
// make sure we got them both as expected
require.Contains(jobTokens, oldJobID)
require.Contains(jobTokens, newJobID)
oldJobResult := &worker.OSBuildKojiJobResult{
HostOS: "rhel-10",
Arch: "k",
OSBuildOutput: &osbuild2.Result{
Type: "result",
Success: true,
Log: map[string]osbuild2.PipelineResult{
"build-old": {
osbuild2.StageResult{
ID: "---",
Type: "org.osbuild.test",
Output: "<test output>",
Success: true,
},
},
},
},
}
oldJobResultRaw, err := json.Marshal(oldJobResult)
require.NoError(err)
oldJobToken := jobTokens[oldJobID]
err = server.FinishJob(oldJobToken, oldJobResultRaw)
require.NoError(err)
oldJobResultRead := new(worker.OSBuildKojiJobResult)
_, _, err = server.OSBuildKojiJobStatus(oldJobID, oldJobResultRead)
require.NoError(err)
// oldJobResultRead should have PipelineNames now
require.NotEqual(oldJobResult, oldJobResultRead)
require.Equal(fbPipelines, oldJobResultRead.PipelineNames)
require.NotNil(oldJobResultRead.PipelineNames)
require.Equal(oldJobResult.OSBuildOutput, oldJobResultRead.OSBuildOutput)
require.Equal(oldJobResult.HostOS, oldJobResultRead.HostOS)
require.Equal(oldJobResult.Arch, oldJobResultRead.Arch)
newJobResult := &worker.OSBuildKojiJobResult{
HostOS: "rhel-10",
Arch: "k",
PipelineNames: &worker.PipelineNames{
Build: []string{"build-result"},
Payload: []string{"result-test-payload", "result-test-assembler"},
},
OSBuildOutput: &osbuild2.Result{
Type: "result",
Success: true,
Log: map[string]osbuild2.PipelineResult{
"build-new": {
osbuild2.StageResult{
ID: "---",
Type: "org.osbuild.test",
Output: "<test output new>",
Success: true,
},
},
},
},
}
newJobResultRaw, err := json.Marshal(newJobResult)
require.NoError(err)
newJobToken := jobTokens[newJobID]
err = server.FinishJob(newJobToken, newJobResultRaw)
require.NoError(err)
newJobResultRead := new(worker.OSBuildKojiJobResult)
_, _, err = server.OSBuildKojiJobStatus(newJobID, newJobResultRead)
require.NoError(err)
require.Equal(newJobResult, newJobResultRead)
}
func TestDepsolveLegacyErrorConversion(t *testing.T) {
distroStruct := test_distro.New()
arch, err := distroStruct.GetArch(test_distro.TestArchName)
if err != nil {
t.Fatalf("error getting arch from distro: %v", err)
}
server := newTestServer(t, t.TempDir(), time.Duration(0), "/api/worker/v1")
depsolveJobId, err := server.EnqueueDepsolve(&worker.DepsolveJob{}, "")
require.NoError(t, err)
_, _, _, _, _, err = server.RequestJob(context.Background(), arch.Name(), []string{"depsolve"}, []string{""})
require.NoError(t, err)
reason := "Depsolve failed"
errType := worker.DepsolveErrorType
expectedResult := worker.DepsolveJobResult{
Error: reason,
ErrorType: errType,
JobResult: worker.JobResult{
JobError: clienterrors.WorkerClientError(clienterrors.ErrorDNFDepsolveError, reason),
},
}
depsolveJobResult := worker.DepsolveJobResult{
Error: reason,
ErrorType: errType,
}
_, _, err = server.DepsolveJobStatus(depsolveJobId, &depsolveJobResult)
require.NoError(t, err)
require.Equal(t, expectedResult, depsolveJobResult)
}
// Enqueue OSBuild jobs and save both kinds of
// error types (old & new) to the queue and ensure
// that both kinds of errors can be read back
func TestMixedOSBuildJobErrors(t *testing.T) {
require := require.New(t)
emptyManifestV2 := distro.Manifest(`{"version":"2","pipelines":{}}`)
server := newTestServer(t, t.TempDir(), time.Millisecond*10, "/")
oldJob := worker.OSBuildJob{
Manifest: emptyManifestV2,
ImageName: "no-pipeline-names",
}
oldJobID, err := server.EnqueueOSBuild("x", &oldJob, "")
require.NoError(err)
newJob := worker.OSBuildJob{
Manifest: emptyManifestV2,
ImageName: "with-pipeline-names",
PipelineNames: &worker.PipelineNames{
Build: []string{"build"},
Payload: []string{"other", "pipelines"},
},
}
newJobID, err := server.EnqueueOSBuild("x", &newJob, "")
require.NoError(err)
oldJobRead := new(worker.OSBuildJob)
err = server.OSBuildJob(oldJobID, oldJobRead)
require.NoError(err)
// Not entirely equal
	require.NotEqual(oldJob, *oldJobRead)
// NewJob the same when read back
newJobRead := new(worker.OSBuildJob)
err = server.OSBuildJob(newJobID, newJobRead)
require.NoError(err)
// Dequeue the jobs (via RequestJob) to get their tokens and update them to
// test the result retrieval
getJob := func() (uuid.UUID, uuid.UUID) {
// don't block forever if the jobs weren't added or can't be retrieved
ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond)
defer cancel()
id, token, _, _, _, err := server.RequestJob(ctx, "x", []string{"osbuild"}, []string{""})
require.NoError(err)
return id, token
}
getJobTokens := func(n uint) map[uuid.UUID]uuid.UUID {
tokens := make(map[uuid.UUID]uuid.UUID, n)
for idx := uint(0); idx < n; idx++ {
id, token := getJob()
tokens[id] = token
}
return tokens
}
jobTokens := getJobTokens(2)
// make sure we got them both as expected
require.Contains(jobTokens, oldJobID)
require.Contains(jobTokens, newJobID)
oldJobResult := &worker.OSBuildJobResult{
TargetErrors: []string{"Upload error"},
}
oldJobResultRaw, err := json.Marshal(oldJobResult)
require.NoError(err)
oldJobToken := jobTokens[oldJobID]
err = server.FinishJob(oldJobToken, oldJobResultRaw)
require.NoError(err)
oldJobResultRead := new(worker.OSBuildJobResult)
_, _, err = server.OSBuildJobStatus(oldJobID, oldJobResultRead)
require.NoError(err)
require.NotEqual(oldJobResult, oldJobResultRead)
require.Equal(oldJobResult.Success, false)
require.Equal(oldJobResultRead.Success, false)
newJobResult := &worker.OSBuildJobResult{
PipelineNames: &worker.PipelineNames{
Build: []string{"build-result"},
Payload: []string{"result-test-payload", "result-test-assembler"},
},
JobResult: worker.JobResult{
JobError: clienterrors.WorkerClientError(clienterrors.ErrorUploadingImage, "Error uploading image", nil),
},
}
newJobResultRaw, err := json.Marshal(newJobResult)
require.NoError(err)
newJobToken := jobTokens[newJobID]
err = server.FinishJob(newJobToken, newJobResultRaw)
require.NoError(err)
newJobResultRead := new(worker.OSBuildJobResult)
_, _, err = server.OSBuildJobStatus(newJobID, newJobResultRead)
require.NoError(err)
require.Equal(newJobResult, newJobResultRead)
require.Equal(newJobResult.Success, false)
require.Equal(newJobResultRead.Success, false)
}
// Enqueue Koji jobs and save both kinds of
// error types (old & new) to the queue and ensure
// that both kinds of errors can be read back
func TestMixedOSBuildKojiJobErrors(t *testing.T) {
require := require.New(t)
emptyManifestV2 := distro.Manifest(`{"version":"2","pipelines":{}}`)
server := newTestServer(t, t.TempDir(), time.Duration(0), "/api/worker/v1")
enqueueKojiJob := func(job *worker.OSBuildKojiJob) uuid.UUID {
initJob := new(worker.KojiInitJob)
initJobID, err := server.EnqueueKojiInit(initJob, "")
require.NoError(err)
jobID, err := server.EnqueueOSBuildKoji("k", job, initJobID, "")
require.NoError(err)
return jobID
}
oldJob := worker.OSBuildKojiJob{
Manifest: emptyManifestV2,
ImageName: "no-pipeline-names",
}
oldJobID := enqueueKojiJob(&oldJob)
newJob := worker.OSBuildKojiJob{
Manifest: emptyManifestV2,
ImageName: "with-pipeline-names",
PipelineNames: &worker.PipelineNames{
Build: []string{"build"},
Payload: []string{"other", "pipelines"},
},
}
newJobID := enqueueKojiJob(&newJob)
oldJobRead := new(worker.OSBuildKojiJob)
err := server.OSBuildKojiJob(oldJobID, oldJobRead)
require.NoError(err)
// Not entirely equal
	require.NotEqual(oldJob, *oldJobRead)
// NewJob the same when read back
newJobRead := new(worker.OSBuildKojiJob)
err = server.OSBuildKojiJob(newJobID, newJobRead)
require.NoError(err)
// Dequeue the jobs (via RequestJob) to get their tokens and update them to
// test the result retrieval
// Finish init jobs
for idx := uint(0); idx < 2; idx++ {
ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond)
defer cancel()
_, token, _, _, _, err := server.RequestJob(ctx, "k", []string{"koji-init"}, []string{""})
require.NoError(err)
require.NoError(server.FinishJob(token, nil))
}
getJob := func() (uuid.UUID, uuid.UUID) {
// don't block forever if the jobs weren't added or can't be retrieved
ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond)
defer cancel()
id, token, _, _, _, err := server.RequestJob(ctx, "k", []string{"osbuild-koji"}, []string{""})
require.NoError(err)
return id, token
}
getJobTokens := func(n uint) map[uuid.UUID]uuid.UUID {
tokens := make(map[uuid.UUID]uuid.UUID, n)
for idx := uint(0); idx < n; idx++ {
id, token := getJob()
tokens[id] = token
}
return tokens
}
jobTokens := getJobTokens(2)
// make sure we got them both as expected
require.Contains(jobTokens, oldJobID)
require.Contains(jobTokens, newJobID)
oldJobResult := &worker.OSBuildKojiJobResult{
KojiError: "koji build error",
}
oldJobResultRaw, err := json.Marshal(oldJobResult)
require.NoError(err)
oldJobToken := jobTokens[oldJobID]
err = server.FinishJob(oldJobToken, oldJobResultRaw)
require.NoError(err)
oldJobResultRead := new(worker.OSBuildKojiJobResult)
_, _, err = server.OSBuildKojiJobStatus(oldJobID, oldJobResultRead)
require.NoError(err)
// oldJobResultRead should have PipelineNames now
require.NotEqual(oldJobResult, oldJobResultRead)
newJobResult := &worker.OSBuildKojiJobResult{
PipelineNames: &worker.PipelineNames{
Build: []string{"build-result"},
Payload: []string{"result-test-payload", "result-test-assembler"},
},
JobResult: worker.JobResult{
JobError: clienterrors.WorkerClientError(clienterrors.ErrorKojiBuild, "Koji build error", nil),
},
}
newJobResultRaw, err := json.Marshal(newJobResult)
require.NoError(err)
newJobToken := jobTokens[newJobID]
err = server.FinishJob(newJobToken, newJobResultRaw)
require.NoError(err)
newJobResultRead := new(worker.OSBuildKojiJobResult)
_, _, err = server.OSBuildKojiJobStatus(newJobID, newJobResultRead)
require.NoError(err)
require.Equal(newJobResult, newJobResultRead)
}
// old depsolve job format kept here to test compatibility with older workers
type oldDepsolveJob struct {
PackageSetsChains map[string][]string `json:"package_sets_chains"`
PackageSets map[string]rpmmd.PackageSet `json:"package_sets"`
Repos []rpmmd.RepoConfig `json:"repos"`
ModulePlatformID string `json:"module_platform_id"`
Arch string `json:"arch"`
Releasever string `json:"releasever"`
PackageSetsRepos map[string][]rpmmd.RepoConfig `json:"package_sets_repositories,omitempty"`
}
func TestDepsolveJobArgsCompat(t *testing.T) {
// Test depsolve job argument transition compatibility
//
// NOTE: This test should be removed once all cloud workers are updated to
// use the new structure.
assert := assert.New(t)
// common elements
baseos := rpmmd.RepoConfig{
Name: "baseos",
BaseURL: "https://example.com/baseos",
}
appstream := rpmmd.RepoConfig{
Name: "appstream",
BaseURL: "https://example.com/appstream",
}
user1 := rpmmd.RepoConfig{
Name: "user1",
BaseURL: "https://example.com/user/1",
}
user2 := rpmmd.RepoConfig{
Name: "user2",
BaseURL: "https://example.com/user/2",
}
osIncludes := []string{"os1", "os2", "os3"}
bpIncludes := []string{"bp1", "bp2"}
buildIncludes := []string{"build1", "build2", "build3"}
excludes := []string{"nope1", "nope2"}
newJob := worker.DepsolveJob{
PackageSets: map[string][]rpmmd.PackageSet{
"os": {
{
Include: osIncludes,
Exclude: excludes,
Repositories: []rpmmd.RepoConfig{baseos, appstream},
},
{
Include: bpIncludes,
Repositories: []rpmmd.RepoConfig{baseos, appstream, user1, user2},
},
},
"build": {{
Include: buildIncludes,
Exclude: excludes,
Repositories: []rpmmd.RepoConfig{baseos, appstream},
}},
},
ModulePlatformID: "el9",
Arch: "x86_64",
Releasever: "9",
}
oldJob := oldDepsolveJob{
PackageSetsChains: map[string][]string{
"os": {"os-0", "os-1"},
},
PackageSets: map[string]rpmmd.PackageSet{
"os-0": {
Include: osIncludes,
Exclude: excludes,
},
"os-1": {
Include: bpIncludes,
},
"build": {
Include: buildIncludes,
Exclude: excludes,
},
},
ModulePlatformID: "el9",
Arch: "x86_64",
Releasever: "9",
PackageSetsRepos: map[string][]rpmmd.RepoConfig{
"os-0": {baseos, appstream},
"os-1": {baseos, appstream, user1, user2},
"build": {baseos, appstream},
},
}
{ // old in, old out (not really useful, but let's cover all bases)
oldArgs, err := json.Marshal(oldJob)
assert.NoError(err)
var oldJobW oldDepsolveJob
assert.NoError(json.Unmarshal(oldArgs, &oldJobW))
assert.Equal(oldJob, oldJobW)
}
{ // new in, old out (the important scenario)
newArgs, err := json.Marshal(newJob)
assert.NoError(err)
var oldJobW oldDepsolveJob
assert.NoError(json.Unmarshal(newArgs, &oldJobW))
assert.Equal(oldJob, oldJobW)
}
{ // new in, new out (check if the serialised format also unmarshals back into the new format)
newArgs, err := json.Marshal(newJob)
assert.NoError(err)
var newJobW worker.DepsolveJob
assert.NoError(json.Unmarshal(newArgs, &newJobW))
assert.Equal(newJob, newJobW)
}
} | }, |
gf255e.go | package field
// This file implements computations in the field of integers
// modulo p = 2^255 - 18651.
// =======================================================================
// Field GF255e: integers modulo p = 2^255 - 18651
type GF255e [4]uint64
const mq255e uint64 = 18651
// Field element of value 0.
var GF255e_ZERO = GF255e { 0, 0, 0, 0 }
// Field element of value 1.
var GF255e_ONE = GF255e { 1, 0, 0, 0 }
// Field element of value 3.
var GF255e_THREE = GF255e { 3, 0, 0, 0 }
// Field element of value 7.
var GF255e_SEVEN = GF255e { 7, 0, 0, 0 }
// Field element of value 8.
var GF255e_EIGHT = GF255e { 8, 0, 0, 0 }
// Field element of value 27.
var GF255e_TWENTYSEVEN = GF255e { 27, 0, 0, 0 }
// Field element of value 176.
var GF255e_HUNDREDSEVENTYSIX = GF255e { 176, 0, 0, 0 }
// Field element of value 308.
var GF255e_THREEHUNDREDEIGHT = GF255e { 308, 0, 0, 0 }
// Field element of value 343.
var GF255e_THREEHUNDREDFORTYTHREE = GF255e { 343, 0, 0, 0 } | var GF255e_INVT508 = GF255e {
0xD40D5B5D2BE1CF5D, 0x3B3573987282DD51,
0x3ECCB22800EED6AE, 0x44F35C558E8FAC0B }
// d <- a
func (d *GF255e) Set(a *GF255e) *GF255e {
copy(d[:], a[:])
return d
}
// d <- a + b
func (d *GF255e) Add(a, b *GF255e) *GF255e {
gf_add((*[4]uint64)(d), (*[4]uint64)(a), (*[4]uint64)(b), mq255e)
return d
}
// d <- a - b
func (d *GF255e) Sub(a, b *GF255e) *GF255e {
gf_sub((*[4]uint64)(d), (*[4]uint64)(a), (*[4]uint64)(b), mq255e)
return d
}
// d <- -a
func (d *GF255e) Neg(a *GF255e) *GF255e {
gf_neg((*[4]uint64)(d), (*[4]uint64)(a), mq255e)
return d
}
// If ctl == 1: d <- a
// If ctl == 0: d <- b
// ctl MUST be 0 or 1
func (d *GF255e) Select(a, b *GF255e, ctl uint64) *GF255e {
gf_select((*[4]uint64)(d), (*[4]uint64)(a), (*[4]uint64)(b), ctl)
return d
}
// d <- d OR (a AND mask)
// mask value should be 0 or 0xFFFFFFFFFFFFFFFF
func (d *GF255e) CondOrFrom(a *GF255e, mask uint64) *GF255e {
d[0] |= mask & a[0]
d[1] |= mask & a[1]
d[2] |= mask & a[2]
d[3] |= mask & a[3]
return d
}
// If ctl == 1: d <- -a
// If ctl == 0: d <- a
// ctl MUST be 0 or 1
func (d *GF255e) CondNeg(a *GF255e, ctl uint64) *GF255e {
gf_condneg((*[4]uint64)(d), (*[4]uint64)(a), mq255e, ctl)
return d
}
// d <- a*b
func (d *GF255e) Mul(a, b *GF255e) *GF255e {
gf_mul((*[4]uint64)(d), (*[4]uint64)(a), (*[4]uint64)(b), mq255e)
return d
}
// d <- a^2
func (d *GF255e) Sqr(a *GF255e) *GF255e {
gf_sqr((*[4]uint64)(d), (*[4]uint64)(a), mq255e)
return d
}
// d <- a^(2^n) for any n >= 0
// This is constant-time with regard to a and d, but not to n.
func (d *GF255e) SqrX(a *GF255e, n uint) *GF255e {
gf_sqr_x((*[4]uint64)(d), (*[4]uint64)(a), n, mq255e)
return d
}
// d <- a/2
func (d *GF255e) Half(a *GF255e) *GF255e {
gf_half((*[4]uint64)(d), (*[4]uint64)(a), mq255e)
return d
}
// d <- a*2^n
// n must be between 1 and 15 (inclusive)
func (d *GF255e) Lsh(a *GF255e, n uint) *GF255e {
gf_lsh((*[4]uint64)(d), (*[4]uint64)(a), n, mq255e)
return d
}
// d <- 1/a (if a == 0, this sets d to 0)
func (d *GF255e) Inv(a *GF255e) *GF255e {
gf_inv_scaled((*[4]uint64)(d), (*[4]uint64)(a), mq255e)
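	// gf_inv_scaled presumably computes the scaled inverse 2^508/a (with
	// zero mapping to zero); multiplying by GF255e_INVT508 = 1/2^508 then
	// removes the scaling factor, leaving 1/a.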
gf_mul((*[4]uint64)(d), (*[4]uint64)(d), (*[4]uint64)(&GF255e_INVT508), mq255e)
return d
}
// Returns 1 if d == 0, or 0 otherwise.
func (d *GF255e) IsZero() uint64 {
return gf_iszero((*[4]uint64)(d), mq255e)
}
// Returns 1 if d == a, or 0 otherwise.
func (d *GF255e) Eq(a *GF255e) uint64 {
return gf_eq((*[4]uint64)(d), (*[4]uint64)(a), mq255e)
}
// Legendre symbol computation; return value:
// 0 if d == 0
// 1 if d != 0 and is a quadratic residue
// -1 if d != 0 and is not a quadratic residue
// Value is returned as uint64, i.e. 0xFFFFFFFFFFFFFFFF for non-squares.
func (d *GF255e) Legendre() uint64 {
return gf_legendre((*[4]uint64)(d), mq255e)
}
// Square root computation. If the source value (a) is a quadratic
// residue, then this function sets this object (d) to a square root
// of a, and returns 1; otherwise, it sets d to zero and returns 0.
// When the input is a square, this function returns the square root
// whose least significant bit (as an integer in the 0..p-1 range) is zero.
func (d *GF255e) Sqrt(a *GF255e) uint64 {
// Since p = 5 mod 8, we use Atkin's algorithm:
// b <- (2*a)^((p-5)/8)
// c <- 2*a*b^2
// return a*b*(c - 1)
var b, c, e, x, x2, x96, y [4]uint64
// e <- 2*a
gf_lsh(&e, (*[4]uint64)(a), 1, mq255e)
// Raise e to the power (p-5)/8. We use an addition chain with
// 251 squarings and 13 extra multiplications:
// (p-5)/8 = (2^240-1)*2^12 + (2^2-1)*2^9 + (2^3-1)*2^5 + 2^2
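	// A quick check of that decomposition: p = 2^255 - 18651, so
	// (p-5)/8 = 2^252 - 2332, and
	//   (2^240-1)*2^12 + (2^2-1)*2^9 + (2^3-1)*2^5 + 2^2
	//     = 2^252 - 4096 + 1536 + 224 + 4 = 2^252 - 2332.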
// x2 <- e^3
gf_sqr(&x2, &e, mq255e)
gf_mul(&x2, &x2, &e, mq255e)
// x <- e^(2^4-1)
gf_sqr_x(&x, &x2, 2, mq255e)
gf_mul(&x, &x, &x2, mq255e)
// x <- e^(2^8-1)
gf_sqr_x(&y, &x, 4, mq255e)
gf_mul(&x, &y, &x, mq255e)
// x <- e^(2^16-1)
gf_sqr_x(&y, &x, 8, mq255e)
gf_mul(&x, &y, &x, mq255e)
// x <- e^(2^48-1)
gf_sqr_x(&y, &x, 16, mq255e)
gf_mul(&y, &y, &x, mq255e)
gf_sqr_x(&y, &y, 16, mq255e)
gf_mul(&x, &y, &x, mq255e)
// x96 <- e^(2^96-1)
gf_sqr_x(&y, &x, 48, mq255e)
gf_mul(&x96, &y, &x, mq255e)
// x <- e^(2^240-1)
gf_sqr_x(&y, &x96, 96, mq255e)
gf_mul(&y, &y, &x96, mq255e)
gf_sqr_x(&y, &y, 48, mq255e)
gf_mul(&x, &y, &x, mq255e)
// x <- e^((p-5)/8)
gf_sqr_x(&x, &x, 3, mq255e)
gf_mul(&x, &x, &x2, mq255e)
gf_sqr_x(&x, &x, 2, mq255e)
gf_mul(&x, &x, &e, mq255e)
gf_sqr_x(&x, &x, 2, mq255e)
gf_mul(&x, &x, &x2, mq255e)
gf_sqr_x(&x, &x, 3, mq255e)
gf_mul(&x, &x, &e, mq255e)
gf_sqr_x(&b, &x, 2, mq255e)
// We now have b = (2*a)^((p-5)/8).
// c <- 2*a*b^2
gf_sqr(&c, &b, mq255e)
gf_mul(&c, &c, &e, mq255e)
// x <- a*b*(c - 1)
gf_sub(&x, &c, (*[4]uint64)(&GF255e_ONE), mq255e)
gf_mul(&x, &x, (*[4]uint64)(a), mq255e)
gf_mul(&x, &x, &b, mq255e)
// Verify the result. If not square, then set the result to 0.
gf_sqr(&y, &x, mq255e)
qr := gf_eq(&y, (*[4]uint64)(a), mq255e)
	for i := 0; i < 4; i++ {
x[i] &= -qr
}
// Normalize the result, and negate the value if the least
// significant bit is 1.
gf_norm(&x, &x, mq255e)
gf_condneg(&x, &x, mq255e, x[0] & 1)
// Return the result.
copy(d[:], x[:])
return qr
}
// Encode element into exactly 32 bytes. The encoding is appended to the
// provided slice, and the resulting slice is returned. The extension is
// done in place if the provided slice has enough capacity.
func (d *GF255e) Encode(dst []byte) []byte {
return gf_encode(dst, (*[4]uint64)(d), mq255e)
}
// Decode element from 32 bytes. If the source is invalid (out of range),
// then the decoded value is zero, and 0 is returned; otherwise, 1 is
// returned.
func (d *GF255e) Decode(src []byte) uint64 {
return gf_decode((*[4]uint64)(d), src, mq255e)
}
// Decode element from bytes. The input bytes are interpreted as an
// integer (unsigned little-endian convention) which is reduced modulo
// the field modulus. By definition, this process cannot fail.
func (d *GF255e) DecodeReduce(src []byte) *GF255e {
gf_decodeReduce((*[4]uint64)(d), src, mq255e)
return d
} | // Field element of value 1/2^508 (used internally for inversions). |
authorizer.rs	|	//! Authorizers are needed to exchange code grants for bearer tokens.
//!
//! The role of an authorizer is to ensure the consistency and security of requests in which a
//! client is willing to trade a code grant for a bearer token. As such, it will first issue grants
//! to clients according to parameters given by the resource owner and the registrar. Upon a client
//! side request, it will then check the given parameters to determine the authorization of such
//! clients.
use std::collections::HashMap;
use std::sync::{MutexGuard, RwLockWriteGuard};
use super::grant::Grant;
use super::generator::TagGrant;
/// Authorizers create and manage authorization codes.
///
/// The authorization code can be traded for a bearer token at the token endpoint.
pub trait Authorizer {
/// Create a code which allows retrieval of a bearer token at a later time.
fn authorize(&mut self, _: Grant) -> Result<String, ()>;
/// Retrieve the parameters associated with a token, invalidating the code in the process. In
/// particular, a code should not be usable twice (there is no stateless implementation of an
/// authorizer for this reason).
fn extract(&mut self, token: &str) -> Result<Option<Grant>, ()>;
}
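// A minimal usage sketch (hypothetical `authorizer` and `grant` values; error
// handling elided):
//
//     let code = authorizer.authorize(grant)?;          // issue a one-time code
//     let grant = authorizer.extract(&code)?;           // Some(grant): redeem and invalidate
//     assert!(authorizer.extract(&code)?.is_none());    // a second redemption yields nothing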
/// An in-memory hash map.
///
/// This authorizer saves a mapping of generated strings to their associated grants. The generator
/// is itself trait based and can be chosen during construction. It is assumed that two different
/// grants never generate the same token.
pub struct AuthMap<I: TagGrant = Box<dyn TagGrant + Send + Sync + 'static>> {
tagger: I,
usage: u64,
tokens: HashMap<String, Grant>,
}
impl<I: TagGrant> AuthMap<I> {
/// Create an authorizer generating tokens with the `tagger`.
///
/// The token map is initially empty and is filled by methods provided in its [`Authorizer`]
/// implementation.
///
/// [`Authorizer`]: ./trait.Authorizer.html
pub fn new(tagger: I) -> Self {
AuthMap {
tagger,
usage: 0,
tokens: HashMap::new(),
}
}
}
impl<'a, A: Authorizer + ?Sized> Authorizer for &'a mut A {
fn authorize(&mut self, grant: Grant) -> Result<String, ()> {
(**self).authorize(grant)
}
fn extract(&mut self, code: &str) -> Result<Option<Grant>, ()> {
(**self).extract(code)
}
}
impl<A: Authorizer + ?Sized> Authorizer for Box<A> {
fn authorize(&mut self, grant: Grant) -> Result<String, ()> {
(**self).authorize(grant)
}
fn extract(&mut self, code: &str) -> Result<Option<Grant>, ()> {
(**self).extract(code)
}
}
impl<'a, A: Authorizer + ?Sized> Authorizer for MutexGuard<'a, A> {
fn | (&mut self, grant: Grant) -> Result<String, ()> {
(**self).authorize(grant)
}
fn extract(&mut self, code: &str) -> Result<Option<Grant>, ()> {
(**self).extract(code)
}
}
impl<'a, A: Authorizer + ?Sized> Authorizer for RwLockWriteGuard<'a, A> {
fn authorize(&mut self, grant: Grant) -> Result<String, ()> {
(**self).authorize(grant)
}
fn extract(&mut self, code: &str) -> Result<Option<Grant>, ()> {
(**self).extract(code)
}
}
impl<I: TagGrant> Authorizer for AuthMap<I> {
fn authorize(&mut self, grant: Grant) -> Result<String, ()> {
// The (usage, grant) tuple needs to be unique. Since this wraps after 2^64 operations, we
// expect the validity time of the grant to have changed by then. This works when you don't
// set your system time forward/backward ~20 billion seconds, assuming ~10^9 operations per
// second.
let next_usage = self.usage.wrapping_add(1);
let token = self.tagger.tag(next_usage - 1, &grant)?;
self.tokens.insert(token.clone(), grant);
self.usage = next_usage;
Ok(token)
}
fn extract<'a>(&mut self, grant: &'a str) -> Result<Option<Grant>, ()> {
Ok(self.tokens.remove(grant))
}
}
#[cfg(test)]
/// Tests for authorizer implementations, including those provided here.
pub mod tests {
use super::*;
use chrono::Utc;
use crate::primitives::grant::Extensions;
use crate::primitives::generator::{Assertion, AssertionKind, RandomGenerator};
/// Tests some invariants that should be upheld by all authorizers.
///
/// Custom implementations may want to import and use this in their own tests.
pub fn simple_test_suite(authorizer: &mut dyn Authorizer) {
let grant = Grant {
owner_id: "Owner".to_string(),
client_id: "Client".to_string(),
scope: "One two three scopes".parse().unwrap(),
redirect_uri: "https://example.com/redirect_me".parse().unwrap(),
until: Utc::now(),
extensions: Extensions::new(),
};
let token = authorizer
.authorize(grant.clone())
.expect("Authorization should not fail here");
let recovered_grant = authorizer
.extract(&token)
.expect("Primitive failed extracting grant")
.expect("Could not extract grant for valid token");
if grant != recovered_grant {
panic!("Grant was not stored correctly");
}
if authorizer.extract(&token).unwrap().is_some() {
panic!("Token must only be usable once");
}
// Authorize the same token again.
let token_again = authorizer
.authorize(grant.clone())
.expect("Authorization should not fail here");
// We don't produce the same token twice.
assert_ne!(token, token_again);
}
#[test]
fn random_test_suite() {
let mut storage = AuthMap::new(RandomGenerator::new(16));
simple_test_suite(&mut storage);
}
#[test]
fn signing_test_suite() {
let assertion = Assertion::new(
AssertionKind::HmacSha256,
b"7EGgy8zManReq9l/ez0AyYE+xPpcTbssgW+8gBnIv3s=",
);
let mut storage = AuthMap::new(assertion);
simple_test_suite(&mut storage);
}
#[test]
#[should_panic]
fn bad_generator() {
struct BadGenerator;
impl TagGrant for BadGenerator {
fn tag(&mut self, _: u64, _: &Grant) -> Result<String, ()> {
Ok("YOLO.HowBadCanItBeToRepeatTokens?".into())
}
}
let mut storage = AuthMap::new(BadGenerator);
simple_test_suite(&mut storage);
}
}
| authorize |
http.go | /* ****************************************************************************
* Copyright 2020 51 Degrees Mobile Experts Limited (51degrees.com)
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
* ***************************************************************************/
package common
import (
"fmt"
"io/ioutil"
"net/http"
"net/url"
"strings"
)
// SWANError is used to pass back errors from methods that call APIs. If the
// Response member is set then the called method can use this information in
// its response. If it is not set then an internal server error can be assumed.
type SWANError struct {
Err error // The underlying error message.
Response *http.Response // The HTTP response that caused the error.
}
// StatusCode returns the status code of the response.
func (e *SWANError) StatusCode() int {
if e.Response != nil {
return e.Response.StatusCode
}
return 0
}
// Error returns the error message as a string from an HTTPError reference.
func (e *SWANError) Error() string { return e.Err.Error() }
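// Example (sketch, hypothetical caller): pick an HTTP status from a *SWANError
// and fall back to an internal server error when no response is attached:
//
//	status := http.StatusInternalServerError
//	if swanErr.StatusCode() != 0 {
//		status = swanErr.StatusCode()
//	}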
// Handler for all HTTP requests to domains controlled by the demo.
func Handler(d []*Domain) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
// Set to true if a domain is found and handled.
found := false
		// r.Host casing can vary dependent on the environment. Using
		// strings.EqualFold rather than a case-sensitive equality test
		// eliminates these issues when matching the configured domain
		// hosts.
for _, domain := range d {
if strings.EqualFold(r.Host, domain.Host) {
// Try static resources first.
f, err := handlerStatic(domain, w, r)
if err != nil {
ReturnServerError(domain.Config, w, err)
return
}
// If not found then use the domain handler.
				if !f {
domain.handler(domain, w, r)
}
// Mark as the domain being found and then break.
found = true
break
}
}
// All handlers have been tried and nothing has been found. Return not
// found.
		if !found {
http.NotFound(w, r)
}
}
}
// NewSWANError creates an error instance that includes the details of the
// response returned. This is needed to pass the correct status codes and
// context back to the caller.
func NewSWANError(c *Configuration, r *http.Response) *SWANError |
// ReturnProxyError returns an error where the request is related to a proxy
// request being passed to another end point.
func ReturnProxyError(c *Configuration, w http.ResponseWriter, e *SWANError) {
s := http.StatusInternalServerError
if e.Response != nil {
s = e.Response.StatusCode
}
ReturnStatusCodeError(c, w, e.Err, s)
}
// ReturnServerError returns an internal server error.
func ReturnServerError(c *Configuration, w http.ResponseWriter, e error) {
ReturnStatusCodeError(c, w, e, http.StatusInternalServerError)
}
// ReturnStatusCodeError returns the HTTP status code specified.
func ReturnStatusCodeError(
c *Configuration,
w http.ResponseWriter,
e error,
code int) {
http.Error(w, e.Error(), code)
if c.Debug {
println(e.Error())
}
}
// GetCleanURL returns a URL with the SWAN data removed.
func GetCleanURL(c *Configuration, r *http.Request) *url.URL {
var u url.URL
u.Scheme = c.Scheme
u.Host = r.Host
u.Path = strings.ReplaceAll(
r.URL.Path,
GetSWANDataFromRequest(r),
"")
u.RawQuery = ""
return &u
}
// GetReturnURL returns a parsed URL from the query string, or if not present
// from the referer HTTP header.
func GetReturnURL(r *http.Request) (*url.URL, error) {
u, err := url.Parse(r.Form.Get("returnUrl"))
if err != nil {
return nil, err
}
if u == nil || u.String() == "" {
u, err = url.Parse(r.Header.Get("Referer"))
if err != nil {
return nil, err
}
}
u.RawQuery = ""
return u, nil
}
// GetCurrentPage returns the current request URL.
func GetCurrentPage(c *Configuration, r *http.Request) *url.URL {
var u url.URL
u.Scheme = c.Scheme
u.Host = r.Host
u.Path = r.URL.Path
return &u
}
| {
var u string
in, err := ioutil.ReadAll(r.Body)
if err != nil {
return &SWANError{err, nil}
}
if c.Debug {
u = r.Request.URL.String()
} else {
u = r.Request.Host
}
return &SWANError{
fmt.Errorf("SWAN '%s' status '%d' message '%s'",
u,
r.StatusCode,
strings.TrimSpace(string(in))),
r}
} |
MinusIcon.js | const React = require("react");
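// Renders a 20x20 "minus" (horizontal bar) glyph as an inline SVG; any props
// passed to the component are spread onto the <svg> element.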
function MinusIcon(props) { | return /*#__PURE__*/React.createElement("svg", Object.assign({
xmlns: "http://www.w3.org/2000/svg",
viewBox: "0 0 20 20",
fill: "currentColor",
"aria-hidden": "true"
}, props), /*#__PURE__*/React.createElement("path", {
fillRule: "evenodd",
d: "M3 10a1 1 0 011-1h12a1 1 0 110 2H4a1 1 0 01-1-1z",
clipRule: "evenodd"
}));
}
module.exports = MinusIcon; | |
community.py | # Copyright(C) 2011, 2015, 2018 by
# Ben Edwards <[email protected]>
# Aric Hagberg <[email protected]>
# Konstantinos Karakatsanis <[email protected]>
# All rights reserved.
# BSD license.
#
# Authors: Ben Edwards ([email protected])
# Aric Hagberg ([email protected])
# Konstantinos Karakatsanis ([email protected])
# Jean-Gabriel Young ([email protected])
"""Generators for classes of graphs used in studying social networks."""
from __future__ import division
import itertools
import math
import networkx as nx
from networkx.utils import py_random_state
__all__ = ['caveman_graph', 'connected_caveman_graph',
'relaxed_caveman_graph', 'random_partition_graph',
'planted_partition_graph', 'gaussian_random_partition_graph',
'ring_of_cliques', 'windmill_graph', 'stochastic_block_model']
def caveman_graph(l, k):
"""Returns a caveman graph of `l` cliques of size `k`.
Parameters
----------
l : int
Number of cliques
k : int
Size of cliques
Returns
-------
G : NetworkX Graph
caveman graph
Notes
-----
    This returns an undirected graph; it can be converted to a directed
graph using :func:`nx.to_directed`, or a multigraph using
``nx.MultiGraph(nx.caveman_graph(l, k))``. Only the undirected version is
described in [1]_ and it is unclear which of the directed
generalizations is most useful.
Examples
--------
>>> G = nx.caveman_graph(3, 3)
See also
--------
connected_caveman_graph
References
----------
.. [1] Watts, D. J. 'Networks, Dynamics, and the Small-World Phenomenon.'
Amer. J. Soc. 105, 493-527, 1999.
"""
# l disjoint cliques of size k
G = nx.empty_graph(l * k)
if k > 1:
for start in range(0, l * k, k):
edges = itertools.combinations(range(start, start + k), 2)
G.add_edges_from(edges)
return G
def connected_caveman_graph(l, k):
"""Returns a connected caveman graph of `l` cliques of size `k`.
The connected caveman graph is formed by creating `n` cliques of size
`k`, then a single edge in each clique is rewired to a node in an
adjacent clique.
Parameters
----------
l : int
number of cliques
k : int
size of cliques
Returns
-------
G : NetworkX Graph
connected caveman graph
Notes
-----
    This returns an undirected graph; it can be converted to a directed
graph using :func:`nx.to_directed`, or a multigraph using
``nx.MultiGraph(nx.caveman_graph(l, k))``. Only the undirected version is
described in [1]_ and it is unclear which of the directed
generalizations is most useful.
Examples
--------
>>> G = nx.connected_caveman_graph(3, 3)
References
----------
.. [1] Watts, D. J. 'Networks, Dynamics, and the Small-World Phenomenon.'
Amer. J. Soc. 105, 493-527, 1999.
"""
G = nx.caveman_graph(l, k)
for start in range(0, l * k, k):
G.remove_edge(start, start + 1)
G.add_edge(start, (start - 1) % (l * k))
return G
@py_random_state(3)
def relaxed_caveman_graph(l, k, p, seed=None):
"""Returns a relaxed caveman graph.
A relaxed caveman graph starts with `l` cliques of size `k`. Edges are
then randomly rewired with probability `p` to link different cliques.
Parameters
----------
l : int
Number of groups
k : int
Size of cliques
p : float
        Probability of rewiring each edge.
seed : integer, random_state, or None (default)
Indicator of random number generation state.
See :ref:`Randomness<randomness>`.
Returns
-------
G : NetworkX Graph
Relaxed Caveman Graph
Raises
------
NetworkXError:
If p is not in [0,1]
Examples
--------
>>> G = nx.relaxed_caveman_graph(2, 3, 0.1, seed=42)
References
----------
.. [1] Santo Fortunato, Community Detection in Graphs,
Physics Reports Volume 486, Issues 3-5, February 2010, Pages 75-174.
https://arxiv.org/abs/0906.0612
"""
G = nx.caveman_graph(l, k)
nodes = list(G)
for (u, v) in G.edges():
if seed.random() < p: # rewire the edge
x = seed.choice(nodes)
if G.has_edge(u, x):
continue
G.remove_edge(u, v)
G.add_edge(u, x)
return G
@py_random_state(3)
def random_partition_graph(sizes, p_in, p_out, seed=None, directed=False):
"""Returns the random partition graph with a partition of sizes.
A partition graph is a graph of communities with sizes defined by
s in sizes. Nodes in the same group are connected with probability
p_in and nodes of different groups are connected with probability
p_out.
Parameters
----------
sizes : list of ints
Sizes of groups
p_in : float
        probability of edges within groups
p_out : float
probability of edges between groups
directed : boolean optional, default=False
Whether to create a directed graph
seed : integer, random_state, or None (default)
Indicator of random number generation state.
See :ref:`Randomness<randomness>`.
Returns
-------
G : NetworkX Graph or DiGraph
        random partition graph of size sum(sizes)
Raises
------
NetworkXError
If p_in or p_out is not in [0,1]
Examples
--------
>>> G = nx.random_partition_graph([10,10,10],.25,.01)
>>> len(G)
30
>>> partition = G.graph['partition']
>>> len(partition)
3
Notes
-----
This is a generalization of the planted-l-partition described in
[1]_. It allows for the creation of groups of any size.
    The partition is stored as a graph attribute 'partition'.
References
----------
.. [1] Santo Fortunato 'Community Detection in Graphs' Physical Reports
Volume 486, Issue 3-5 p. 75-174. https://arxiv.org/abs/0906.0612
"""
    # Use the geometric method (stochastic_block_model with sparse=True) for
    # an O(n+m) complexity algorithm.
if not 0.0 <= p_in <= 1.0:
raise nx.NetworkXError("p_in must be in [0,1]")
if not 0.0 <= p_out <= 1.0:
raise nx.NetworkXError("p_out must be in [0,1]")
# create connection matrix
num_blocks = len(sizes)
p = [[p_out for s in range(num_blocks)] for r in range(num_blocks)]
for r in range(num_blocks):
p[r][r] = p_in
return stochastic_block_model(sizes, p, nodelist=None, seed=seed,
directed=directed, selfloops=False,
sparse=True)
@py_random_state(4)
def | (l, k, p_in, p_out, seed=None, directed=False):
"""Returns the planted l-partition graph.
This model partitions a graph with n=l*k vertices in
l groups with k vertices each. Vertices of the same
group are linked with a probability p_in, and vertices
of different groups are linked with probability p_out.
Parameters
----------
l : int
Number of groups
k : int
Number of vertices in each group
p_in : float
probability of connecting vertices within a group
p_out : float
probability of connected vertices between groups
seed : integer, random_state, or None (default)
Indicator of random number generation state.
See :ref:`Randomness<randomness>`.
directed : bool,optional (default=False)
If True return a directed graph
Returns
-------
G : NetworkX Graph or DiGraph
planted l-partition graph
Raises
------
NetworkXError:
        If p_in or p_out is not in [0,1]
Examples
--------
>>> G = nx.planted_partition_graph(4, 3, 0.5, 0.1, seed=42)
See Also
--------
    random_partition_graph
References
----------
.. [1] A. Condon, R.M. Karp, Algorithms for graph partitioning
on the planted partition model,
Random Struct. Algor. 18 (2001) 116-140.
.. [2] Santo Fortunato 'Community Detection in Graphs' Physical Reports
Volume 486, Issue 3-5 p. 75-174. https://arxiv.org/abs/0906.0612
"""
return random_partition_graph([k] * l, p_in, p_out, seed, directed)
@py_random_state(6)
def gaussian_random_partition_graph(n, s, v, p_in, p_out, directed=False,
seed=None):
"""Generate a Gaussian random partition graph.
A Gaussian random partition graph is created by creating k partitions
each with a size drawn from a normal distribution with mean s and variance
s/v. Nodes are connected within clusters with probability p_in and
    between clusters with probability p_out [1]_.
Parameters
----------
n : int
Number of nodes in the graph
s : float
Mean cluster size
v : float
Shape parameter. The variance of cluster size distribution is s/v.
p_in : float
        Probability of intra-cluster connection.
    p_out : float
        Probability of inter-cluster connection.
directed : boolean, optional default=False
Whether to create a directed graph or not
seed : integer, random_state, or None (default)
Indicator of random number generation state.
See :ref:`Randomness<randomness>`.
Returns
-------
G : NetworkX Graph or DiGraph
gaussian random partition graph
Raises
------
NetworkXError
        If s > n
If p_in or p_out is not in [0,1]
Notes
-----
Note the number of partitions is dependent on s,v and n, and that the
last partition may be considerably smaller, as it is sized to simply
fill out the nodes [1]
See Also
--------
random_partition_graph
Examples
--------
>>> G = nx.gaussian_random_partition_graph(100,10,10,.25,.1)
>>> len(G)
100
References
----------
.. [1] Ulrik Brandes, Marco Gaertler, Dorothea Wagner,
Experiments on Graph Clustering Algorithms,
In the proceedings of the 11th Europ. Symp. Algorithms, 2003.
"""
if s > n:
raise nx.NetworkXError("s must be <= n")
assigned = 0
sizes = []
while True:
size = int(seed.gauss(s, float(s) / v + 0.5))
if size < 1: # how to handle 0 or negative sizes?
continue
if assigned + size >= n:
sizes.append(n - assigned)
break
assigned += size
sizes.append(size)
    return random_partition_graph(sizes, p_in, p_out, seed, directed)
def ring_of_cliques(num_cliques, clique_size):
"""Defines a "ring of cliques" graph.
    A ring of cliques graph consists of cliques connected through single
    links. Each clique is a complete graph.
Parameters
----------
num_cliques : int
Number of cliques
clique_size : int
Size of cliques
Returns
-------
G : NetworkX Graph
ring of cliques graph
Raises
------
NetworkXError
If the number of cliques is lower than 2 or
if the size of cliques is smaller than 2.
Examples
--------
>>> G = nx.ring_of_cliques(8, 4)
See Also
--------
connected_caveman_graph
Notes
-----
The `connected_caveman_graph` graph removes a link from each clique to
connect it with the next clique. Instead, the `ring_of_cliques` graph
simply adds the link without removing any link from the cliques.
"""
if num_cliques < 2:
raise nx.NetworkXError('A ring of cliques must have at least '
'two cliques')
if clique_size < 2:
raise nx.NetworkXError('The cliques must have at least two nodes')
G = nx.Graph()
for i in range(num_cliques):
edges = itertools.combinations(range(i * clique_size, i * clique_size +
clique_size), 2)
G.add_edges_from(edges)
G.add_edge(i * clique_size + 1, (i + 1) * clique_size %
(num_cliques * clique_size))
return G
def windmill_graph(n, k):
"""Generate a windmill graph.
A windmill graph is a graph of `n` cliques each of size `k` that are all
joined at one node.
It can be thought of as taking a disjoint union of `n` cliques of size `k`,
selecting one point from each, and contracting all of the selected points.
Alternatively, one could generate `n` cliques of size `k-1` and one node
that is connected to all other nodes in the graph.
Parameters
----------
n : int
Number of cliques
k : int
Size of cliques
Returns
-------
G : NetworkX Graph
windmill graph with n cliques of size k
Raises
------
NetworkXError
If the number of cliques is less than two
        If the size of the cliques is less than two
Examples
--------
>>> G = nx.windmill_graph(4, 5)
Notes
-----
The node labeled `0` will be the node connected to all other nodes.
Note that windmill graphs are usually denoted `Wd(k,n)`, so the parameters
are in the opposite order as the parameters of this method.
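
    For example, in ``nx.windmill_graph(4, 5)`` the shared node 0 has degree
    n*(k-1) = 16, while every other node has degree k-1 = 4.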
"""
if n < 2:
msg = 'A windmill graph must have at least two cliques'
raise nx.NetworkXError(msg)
if k < 2:
raise nx.NetworkXError('The cliques must have at least two nodes')
G = nx.disjoint_union_all(itertools.chain([nx.complete_graph(k)],
(nx.complete_graph(k - 1)
for _ in range(n - 1))))
G.add_edges_from((0, i) for i in range(k, G.number_of_nodes()))
return G
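# A quick sanity-check sketch (not part of the original module): node 0 is the
# hub and is adjacent to every other node, while each non-hub node sees its
# k - 2 clique mates plus the hub.
def _windmill_degrees(n=4, k=5):
    G = windmill_graph(n, k)
    assert G.degree(0) == n * (k - 1)
    assert all(G.degree(v) == k - 1 for v in range(1, len(G)))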
@py_random_state(3)
def stochastic_block_model(sizes, p, nodelist=None, seed=None,
directed=False, selfloops=False, sparse=True):
"""Returns a stochastic block model graph.
This model partitions the nodes in blocks of arbitrary sizes, and places
edges between pairs of nodes independently, with a probability that depends
on the blocks.
Parameters
----------
sizes : list of ints
Sizes of blocks
p : list of list of floats
Element (r,s) gives the density of edges going from the nodes
of group r to nodes of group s.
p must match the number of groups (len(sizes) == len(p)),
and it must be symmetric if the graph is undirected.
nodelist : list, optional
The block tags are assigned according to the node identifiers
in nodelist. If nodelist is None, then the ordering is the
range [0,sum(sizes)-1].
seed : integer, random_state, or None (default)
Indicator of random number generation state.
See :ref:`Randomness<randomness>`.
directed : boolean optional, default=False
Whether to create a directed graph or not.
selfloops : boolean optional, default=False
Whether to include self-loops or not.
    sparse : boolean optional, default=True
Use the sparse heuristic to speed up the generator.
Returns
-------
g : NetworkX Graph or DiGraph
Stochastic block model graph of size sum(sizes)
Raises
------
NetworkXError
If probabilities are not in [0,1].
If the probability matrix is not square (directed case).
If the probability matrix is not symmetric (undirected case).
If the sizes list does not match nodelist or the probability matrix.
        If nodelist contains duplicates.
Examples
--------
>>> sizes = [75, 75, 300]
>>> probs = [[0.25, 0.05, 0.02],
... [0.05, 0.35, 0.07],
... [0.02, 0.07, 0.40]]
>>> g = nx.stochastic_block_model(sizes, probs, seed=0)
>>> len(g)
450
>>> H = nx.quotient_graph(g, g.graph['partition'], relabel=True)
>>> for v in H.nodes(data=True):
... print(round(v[1]['density'], 3))
...
0.245
0.348
0.405
>>> for v in H.edges(data=True):
... print(round(1.0 * v[2]['weight'] / (sizes[v[0]] * sizes[v[1]]), 3))
...
0.051
0.022
0.07
See Also
--------
random_partition_graph
planted_partition_graph
gaussian_random_partition_graph
gnp_random_graph
References
----------
.. [1] Holland, P. W., Laskey, K. B., & Leinhardt, S.,
"Stochastic blockmodels: First steps",
Social networks, 5(2), 109-137, 1983.
"""
# Check if dimensions match
if len(sizes) != len(p):
raise nx.NetworkXException("'sizes' and 'p' do not match.")
# Check for probability symmetry (undirected) and shape (directed)
for row in p:
if len(p) != len(row):
raise nx.NetworkXException("'p' must be a square matrix.")
if not directed:
p_transpose = [list(i) for i in zip(*p)]
for i in zip(p, p_transpose):
for j in zip(i[0], i[1]):
if abs(j[0] - j[1]) > 1e-08:
raise nx.NetworkXException("'p' must be symmetric.")
# Check for probability range
for row in p:
for prob in row:
if prob < 0 or prob > 1:
raise nx.NetworkXException("Entries of 'p' not in [0,1].")
# Check for nodelist consistency
if nodelist is not None:
if len(nodelist) != sum(sizes):
raise nx.NetworkXException("'nodelist' and 'sizes' do not match.")
if len(nodelist) != len(set(nodelist)):
raise nx.NetworkXException("nodelist contains duplicate.")
else:
nodelist = range(0, sum(sizes))
# Setup the graph conditionally to the directed switch.
block_range = range(len(sizes))
if directed:
g = nx.DiGraph()
block_iter = itertools.product(block_range, block_range)
else:
g = nx.Graph()
block_iter = itertools.combinations_with_replacement(block_range, 2)
# Split nodelist in a partition (list of sets).
size_cumsum = [sum(sizes[0:x]) for x in range(0, len(sizes) + 1)]
g.graph['partition'] = [set(nodelist[size_cumsum[x]:size_cumsum[x + 1]])
for x in range(0, len(size_cumsum) - 1)]
# Setup nodes and graph name
for block_id, nodes in enumerate(g.graph['partition']):
for node in nodes:
g.add_node(node, block=block_id)
g.name = "stochastic_block_model"
# Test for edge existence
parts = g.graph['partition']
for i, j in block_iter:
if i == j:
if directed:
if selfloops:
edges = itertools.product(parts[i], parts[i])
else:
edges = itertools.permutations(parts[i], 2)
else:
edges = itertools.combinations(parts[i], 2)
if selfloops:
edges = itertools.chain(edges, zip(parts[i], parts[i]))
for e in edges:
if seed.random() < p[i][j]:
g.add_edge(*e)
else:
edges = itertools.product(parts[i], parts[j])
if sparse:
                if p[i][j] == 1:  # Test edge cases p_ij = 0 or 1
for e in edges:
g.add_edge(*e)
elif p[i][j] > 0:
while True:
try:
logrand = math.log(seed.random())
skip = math.floor(logrand / math.log(1 - p[i][j]))
# consume "skip" edges
next(itertools.islice(edges, skip, skip), None)
e = next(edges)
g.add_edge(*e) # __safe
except StopIteration:
break
else:
for e in edges:
if seed.random() < p[i][j]:
g.add_edge(*e) # __safe
return g
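# A minimal sketch (not part of the original module) of the geometric-skip
# trick used in the sparse branch above: rather than testing every candidate
# edge against p, draw the gap to the next accepted edge as
# floor(log(U) / log(1 - p)) with U uniform in (0, 1), which is geometrically
# distributed, so only the ~ m * p accepted candidates are ever visited.
# Assumes 0 < p < 1 (the p == 0 and p == 1 cases are handled separately above).
import math
import random

def _geometric_skip_sample(m, p, rng=None):
    """Yield indices in range(m), each kept independently with probability p."""
    rng = rng or random.Random()
    i = -1
    while True:
        i += 1 + math.floor(math.log(rng.random()) / math.log(1 - p))
        if i >= m:
            return
        yield i

# len(list(_geometric_skip_sample(10000, 0.01))) is about 100 on average.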
| planted_partition_graph |
path_utils.py | import os, sys, inspect
import subprocess
from . import version
__version__ = version.get_current()
def get_current_dir_for_jupyter():
"""Get the current path for jupyter"""
    return get_current_dir(os.getcwd())
def get_current_dir(current_file):
"""Get the current path"""
current_dir = os.path.dirname(current_file)
return current_dir
def get_parent_dir(current_file):
"""Get the parth path"""
current_dir = os.path.dirname(current_file)
parent_dir = os.path.dirname(current_dir)
return parent_dir
def join(path, *paths):
"""Path join"""
return os.path.join(path, *paths)
def insert_parent_package_dir(current_file):
"""Insert parent package dir"""
current_dir = os.path.dirname(current_file) | parent_dir = os.path.dirname(current_dir)
sys.path.insert(0, parent_dir) |
|
continuous.py | # This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2019.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
# pylint: disable=missing-return-doc, invalid-unary-operand-type
"""Module for builtin continuous pulse functions."""
import functools
from typing import Union, Tuple, Optional
import numpy as np
from qiskit.pulse.exceptions import PulseError
def constant(times: np.ndarray, amp: complex) -> np.ndarray:
"""Continuous constant pulse.
Args:
times: Times to output pulse for.
amp: Complex pulse amplitude.
"""
return np.full(len(times), amp, dtype=np.complex_)
def zero(times: np.ndarray) -> np.ndarray:
"""Continuous zero pulse.
Args:
times: Times to output pulse for.
"""
return constant(times, 0)
def square(times: np.ndarray, amp: complex, freq: float, phase: float = 0) -> np.ndarray:
"""Continuous square wave.
Args:
times: Times to output wave for.
amp: Pulse amplitude. Wave range is [-amp, amp].
freq: Pulse frequency. units of 1/dt.
phase: Pulse phase.
"""
x = times * freq + phase / np.pi
return amp * (2 * (2 * np.floor(x) - np.floor(2 * x)) + 1).astype(np.complex_)
def sawtooth(times: np.ndarray, amp: complex, freq: float, phase: float = 0) -> np.ndarray:
"""Continuous sawtooth wave.
Args:
times: Times to output wave for.
amp: Pulse amplitude. Wave range is [-amp, amp].
freq: Pulse frequency. units of 1/dt.
phase: Pulse phase.
"""
x = times * freq + phase / np.pi
return amp * 2 * (x - np.floor(1 / 2 + x)).astype(np.complex_)
def triangle(times: np.ndarray, amp: complex, freq: float, phase: float = 0) -> np.ndarray:
"""Continuous triangle wave.
Args:
times: Times to output wave for.
amp: Pulse amplitude. Wave range is [-amp, amp].
freq: Pulse frequency. units of 1/dt.
phase: Pulse phase.
"""
return amp * (-2 * np.abs(sawtooth(times, 1, freq, phase=(phase - np.pi / 2) / 2)) + 1).astype(
np.complex_
)
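# A small self-check sketch (not part of the original module) for the three
# periodic pulses above: each stays within [-amp, amp] and repeats with period
# 1/freq (0.5 for freq=2 below).
def _waveform_sanity_check():
    times = np.linspace(0, 2, 200)
    for wave in (square, sawtooth, triangle):
        samples = wave(times, amp=1.0, freq=2.0)
        assert np.all(np.abs(samples) <= 1.0 + 1e-12)
        assert np.allclose(samples, wave(times + 0.5, amp=1.0, freq=2.0))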
def cos(times: np.ndarray, amp: complex, freq: float, phase: float = 0) -> np.ndarray:
"""Continuous cosine wave.
Args:
times: Times to output wave for.
amp: Pulse amplitude.
freq: Pulse frequency, units of 1/dt.
phase: Pulse phase.
"""
return amp * np.cos(2 * np.pi * freq * times + phase).astype(np.complex_)
def sin(times: np.ndarray, amp: complex, freq: float, phase: float = 0) -> np.ndarray:
"""Continuous cosine wave.
Args:
times: Times to output wave for.
amp: Pulse amplitude.
freq: Pulse frequency, units of 1/dt.
phase: Pulse phase.
"""
return amp * np.sin(2 * np.pi * freq * times + phase).astype(np.complex_)
def _fix_gaussian_width(
gaussian_samples,
amp: float,
center: float,
sigma: float,
zeroed_width: Optional[float] = None,
rescale_amp: bool = False,
ret_scale_factor: bool = False,
) -> np.ndarray:
r"""Enforce that the supplied gaussian pulse is zeroed at a specific width.
    This is achieved by subtracting $\Omega_g(center \pm zeroed_width/2)$ from all samples.
    Args:
amp: Pulse amplitude at `center`.
center: Center (mean) of pulse.
sigma: Standard deviation of pulse.
zeroed_width: Subtract baseline from gaussian pulses to make sure
$\Omega_g(center \pm zeroed_width/2)=0$ is satisfied. This is used to avoid
        large discontinuities at the start of a gaussian pulse. If unsupplied,
        defaults to $2*(center + 1)$ such that $\Omega_g(-1)=0$ and $\Omega_g(2 \cdot center + 1)=0$.
rescale_amp: If True the pulse will be rescaled so that $\Omega_g(center)=amp$.
ret_scale_factor: Return amplitude scale factor.
"""
if zeroed_width is None:
zeroed_width = 2 * (center + 1)
zero_offset = gaussian(np.array([zeroed_width / 2]), amp, 0, sigma)
gaussian_samples -= zero_offset
amp_scale_factor = 1.0
if rescale_amp:
amp_scale_factor = amp / (amp - zero_offset) if amp - zero_offset != 0 else 1.0
gaussian_samples *= amp_scale_factor
if ret_scale_factor:
return gaussian_samples, amp_scale_factor
return gaussian_samples
def gaussian(
times: np.ndarray,
amp: complex,
center: float,
sigma: float,
zeroed_width: Optional[float] = None,
rescale_amp: bool = False,
ret_x: bool = False,
) -> Union[np.ndarray, Tuple[np.ndarray, np.ndarray]]:
r"""Continuous unnormalized gaussian pulse.
    Integrated area under the curve is $\Omega_g(amp, sigma) = amp \times \sqrt{2\pi \sigma^2}$
Args:
times: Times to output pulse for.
        amp: Pulse amplitude at `center`. If `zeroed_width` is set, the pulse amplitude at center
will be $amp-\Omega_g(center \pm zeroed_width/2)$ unless `rescale_amp` is set,
in which case all samples will be rescaled such that the center
amplitude will be `amp`.
center: Center (mean) of pulse.
sigma: Width (standard deviation) of pulse.
zeroed_width: Subtract baseline from gaussian pulses to make sure
$\Omega_g(center \pm zeroed_width/2)=0$ is satisfied. This is used to avoid
large discontinuities at the start of a gaussian pulse.
rescale_amp: If `zeroed_width` is not `None` and `rescale_amp=True` the pulse will
be rescaled so that $\Omega_g(center)=amp$.
ret_x: Return centered and standard deviation normalized pulse location.
            $x=(times-center)/sigma$.
"""
times = np.asarray(times, dtype=np.complex_)
x = (times - center) / sigma
gauss = amp * np.exp(-(x ** 2) / 2).astype(np.complex_)
if zeroed_width is not None:
gauss = _fix_gaussian_width(
gauss,
amp=amp,
center=center,
sigma=sigma,
zeroed_width=zeroed_width,
rescale_amp=rescale_amp,
)
if ret_x:
return gauss, x
return gauss
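# A usage sketch (not part of the original module): with `zeroed_width` equal
# to the full sampling window and `rescale_amp=True`, the pulse is exactly
# zero at both window edges while still peaking at `amp` near the center.
def _gaussian_zeroed_width_sketch():
    duration, amp, sigma = 100, 0.5, 10
    times = np.arange(duration)
    center = (duration - 1) / 2
    samples = gaussian(times, amp, center, sigma,
                       zeroed_width=duration - 1, rescale_amp=True)
    assert abs(samples[0]) < 1e-12 and abs(samples[-1]) < 1e-12
    assert abs(samples[int(center)] - amp) < 1e-2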
def gaussian_deriv(
times: np.ndarray,
amp: complex,
center: float,
sigma: float,
ret_gaussian: bool = False,
zeroed_width: Optional[float] = None,
rescale_amp: bool = False,
) -> np.ndarray:
r"""Continuous unnormalized gaussian derivative pulse.
Args:
times: Times to output pulse for.
amp: Pulse amplitude at `center`.
center: Center (mean) of pulse.
sigma: Width (standard deviation) of pulse.
        ret_gaussian: Return the gaussian from which the derivative was taken.
zeroed_width: Subtract baseline of pulse to make sure
$\Omega_g(center \pm zeroed_width/2)=0$ is satisfied. This is used to avoid
large discontinuities at the start of a pulse.
rescale_amp: If `zeroed_width` is not `None` and `rescale_amp=True` the pulse will
be rescaled so that $\Omega_g(center)=amp$.
"""
gauss, x = gaussian(
times,
amp=amp,
center=center,
sigma=sigma,
zeroed_width=zeroed_width,
rescale_amp=rescale_amp,
ret_x=True,
)
gauss_deriv = -x / sigma * gauss
if ret_gaussian:
return gauss_deriv, gauss
return gauss_deriv
def _fix_sech_width(
sech_samples,
amp: float,
center: float,
sigma: float,
zeroed_width: Optional[float] = None,
rescale_amp: bool = False,
ret_scale_factor: bool = False,
) -> np.ndarray:
r"""Enforce that the supplied sech pulse is zeroed at a specific width.
    This is achieved by subtracting $\Omega_g(center \pm zeroed_width/2)$ from all samples.
    Args:
amp: Pulse amplitude at `center`.
center: Center (mean) of pulse.
sigma: Standard deviation of pulse.
zeroed_width: Subtract baseline from sech pulses to make sure
$\Omega_g(center \pm zeroed_width/2)=0$ is satisfied. This is used to avoid
        large discontinuities at the start of a sech pulse. If unsupplied,
        defaults to $2*(center + 1)$ such that $\Omega_g(-1)=0$ and $\Omega_g(2 \cdot center + 1)=0$.
rescale_amp: If True the pulse will be rescaled so that $\Omega_g(center)=amp$.
ret_scale_factor: Return amplitude scale factor.
"""
if zeroed_width is None:
zeroed_width = 2 * (center + 1)
zero_offset = sech(np.array([zeroed_width / 2]), amp, 0, sigma)
sech_samples -= zero_offset
amp_scale_factor = 1.0
if rescale_amp:
amp_scale_factor = amp / (amp - zero_offset) if amp - zero_offset != 0 else 1.0
sech_samples *= amp_scale_factor
if ret_scale_factor:
return sech_samples, amp_scale_factor
return sech_samples
def | (x, *args, **kwargs):
r"""Hyperbolic secant function"""
return 1.0 / np.cosh(x, *args, **kwargs)
def sech(
times: np.ndarray,
amp: complex,
center: float,
sigma: float,
zeroed_width: Optional[float] = None,
rescale_amp: bool = False,
ret_x: bool = False,
) -> Union[np.ndarray, Tuple[np.ndarray, np.ndarray]]:
r"""Continuous unnormalized sech pulse.
Args:
times: Times to output pulse for.
amp: Pulse amplitude at `center`.
center: Center (mean) of pulse.
sigma: Width (standard deviation) of pulse.
zeroed_width: Subtract baseline from pulse to make sure
$\Omega_g(center \pm zeroed_width/2)=0$ is satisfied. This is used to avoid
large discontinuities at the start and end of the pulse.
rescale_amp: If `zeroed_width` is not `None` and `rescale_amp=True` the pulse will
be rescaled so that $\Omega_g(center)=amp$.
ret_x: Return centered and standard deviation normalized pulse location.
$x=(times-center)/sigma$.
"""
times = np.asarray(times, dtype=np.complex_)
x = (times - center) / sigma
sech_out = amp * sech_fn(x).astype(np.complex_)
if zeroed_width is not None:
sech_out = _fix_sech_width(
sech_out,
amp=amp,
center=center,
sigma=sigma,
zeroed_width=zeroed_width,
rescale_amp=rescale_amp,
)
if ret_x:
return sech_out, x
return sech_out
def sech_deriv(
times: np.ndarray, amp: complex, center: float, sigma: float, ret_sech: bool = False
) -> np.ndarray:
"""Continuous unnormalized sech derivative pulse.
Args:
times: Times to output pulse for.
amp: Pulse amplitude at `center`.
center: Center (mean) of pulse.
sigma: Width (standard deviation) of pulse.
        ret_sech: Return the sech from which the derivative was taken.
"""
sech_out, x = sech(times, amp=amp, center=center, sigma=sigma, ret_x=True)
sech_out_deriv = -sech_out * np.tanh(x) / sigma
if ret_sech:
return sech_out_deriv, sech_out
return sech_out_deriv
def gaussian_square(
times: np.ndarray,
amp: complex,
center: float,
square_width: float,
sigma: float,
zeroed_width: Optional[float] = None,
) -> np.ndarray:
r"""Continuous gaussian square pulse.
Args:
times: Times to output pulse for.
amp: Pulse amplitude.
center: Center of the square pulse component.
square_width: Width of the square pulse component.
sigma: Standard deviation of Gaussian rise/fall portion of the pulse.
        zeroed_width: Subtract baseline of gaussian square pulse
            to enforce $\Omega_{square}(center \pm zeroed_width/2)=0$.
Raises:
PulseError: if zeroed_width is not compatible with square_width.
"""
square_start = center - square_width / 2
square_stop = center + square_width / 2
if zeroed_width:
if zeroed_width < square_width:
raise PulseError("zeroed_width cannot be smaller than square_width.")
gaussian_zeroed_width = zeroed_width - square_width
else:
gaussian_zeroed_width = None
funclist = [
functools.partial(
gaussian,
amp=amp,
center=square_start,
sigma=sigma,
zeroed_width=gaussian_zeroed_width,
rescale_amp=True,
),
functools.partial(
gaussian,
amp=amp,
center=square_stop,
sigma=sigma,
zeroed_width=gaussian_zeroed_width,
rescale_amp=True,
),
functools.partial(constant, amp=amp),
]
condlist = [times <= square_start, times >= square_stop]
return np.piecewise(times.astype(np.complex_), condlist, funclist)
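# A usage sketch (not part of the original module): inside the square window
# the samples sit exactly at `amp`, while outside it they fall off along the
# two gaussian edges centered on the window boundaries.
def _gaussian_square_sketch():
    times = np.arange(100, dtype=float)
    samples = gaussian_square(times, amp=1.0, center=50,
                              square_width=20, sigma=5)
    flat = samples[(times >= 40) & (times <= 60)]
    assert np.allclose(flat, 1.0)
    assert np.all(np.abs(samples[times < 40]) < 1.0)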
def drag(
times: np.ndarray,
amp: complex,
center: float,
sigma: float,
beta: float,
zeroed_width: Optional[float] = None,
rescale_amp: bool = False,
) -> np.ndarray:
r"""Continuous Y-only correction DRAG pulse for standard nonlinear oscillator (SNO) [1].
[1] Gambetta, J. M., Motzoi, F., Merkel, S. T. & Wilhelm, F. K.
Analytic control methods for high-fidelity unitary operations
in a weakly nonlinear oscillator. Phys. Rev. A 83, 012308 (2011).
Args:
times: Times to output pulse for.
amp: Pulse amplitude at `center`.
center: Center (mean) of pulse.
sigma: Width (standard deviation) of pulse.
        beta: Y correction amplitude. For the SNO this is $\beta=-\frac{\lambda_1^2}{4\Delta_2}$,
            where $\lambda_1$ is the relative coupling strength between the first excited and second
            excited states and $\Delta_2$ is the detuning between the respective excited states.
zeroed_width: Subtract baseline of drag pulse to make sure
$\Omega_g(center \pm zeroed_width/2)=0$ is satisfied. This is used to avoid
large discontinuities at the start of a drag pulse.
rescale_amp: If `zeroed_width` is not `None` and `rescale_amp=True` the pulse will
be rescaled so that $\Omega_g(center)=amp$.
"""
gauss_deriv, gauss = gaussian_deriv(
times,
amp=amp,
center=center,
sigma=sigma,
ret_gaussian=True,
zeroed_width=zeroed_width,
rescale_amp=rescale_amp,
)
return gauss + 1j * beta * gauss_deriv
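# A small sketch (not part of the original module): the DRAG envelope places
# the gaussian on the in-phase (real) axis and beta times its derivative on
# the quadrature (imaginary) axis; the derivative vanishes at the center, so
# the center sample is purely real and equal to `amp`.
def _drag_center_sketch():
    times = np.arange(100, dtype=float)
    samples = drag(times, amp=0.5, center=50, sigma=10, beta=2.0)
    assert abs(samples[50].imag) < 1e-12
    assert abs(samples[50].real - 0.5) < 1e-12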
| sech_fn |
test_css.py | import pytest
from django.core.exceptions import ValidationError
from pretalx.common.css import validate_css
from pretalx.event.models import Event
@pytest.fixture
def valid_css():
return '''
body {
background-color: #000;
display: none;
}
.some-descriptor {
border-style: dotted dashed solid double;
BORDER-color: red green blue yellow;
}
#best-descriptor {
border: 5px solid red;
}
'''
@pytest.fixture
def invalid_css(valid_css):
return valid_css + '''
a.other-descriptor {
content: url("https://malicious.site.com");
}
'''
@pytest.fixture
def some_object():
class Foo:
|
return Foo()
def test_valid_css(valid_css):
assert validate_css(valid_css) == valid_css
def test_invalid_css(invalid_css):
with pytest.raises(ValidationError):
validate_css(invalid_css)
@pytest.mark.django_db
def test_regenerate_css(event):
from pretalx.common.tasks import regenerate_css
event.primary_color = '#00ff00'
event.save()
regenerate_css(event.pk)
event = Event.objects.get(pk=event.pk)
for local_app in ['agenda', 'cfp', 'orga']:
assert event.settings.get(f'{local_app}_css_file')
assert event.settings.get(f'{local_app}_css_checksum')
@pytest.mark.django_db
def test_regenerate_css_no_color(event):
from pretalx.common.tasks import regenerate_css
event.primary_color = None
event.save()
regenerate_css(event.pk)
event = Event.objects.get(pk=event.pk)
for local_app in ['agenda', 'cfp', 'orga']:
assert not event.settings.get(f'{local_app}_css_file')
assert not event.settings.get(f'{local_app}_css_checksum')
| pass |
basic_ghcli.go | package ghcli
import (
"fmt"
"github.com/stanleynguyen/git-comment/comment-app/repository"
"net/http"
"github.com/stanleynguyen/git-comment/comment-app/domain"
)
type basicGithubCli struct{}
// OrgExists checks if a Github org exists
func (c *basicGithubCli) OrgExists(org string) (bool, error) {
resp, err := http.Get(fmt.Sprintf("https://api.github.com/orgs/%s", org))
if err != nil {
return false, err
}
if resp.StatusCode == http.StatusNotFound {
return false, nil
} else if resp.StatusCode >= 300 {
return false, domain.NewErrorInternalServer("Problems with Github service")
}
return true, nil
}
// NewBasicGithubClient generates a new instance of the basic Github API client
func NewBasicGithubClient() repository.GithubCli | {
return &basicGithubCli{}
} |
|
operations.rs | #![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::models::*;
pub mod role_assignments {
use crate::models::*;
pub async fn list_for_resource(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
resource_provider_namespace: &str,
parent_resource_path: &str,
resource_type: &str,
resource_name: &str,
filter: Option<&str>,
subscription_id: &str,
) -> std::result::Result<RoleAssignmentListResult, list_for_resource::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/{}/{}/{}/{}/providers/Microsoft.Authorization/roleAssignments",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_provider_namespace,
parent_resource_path,
resource_type,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(list_for_resource::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_for_resource::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_for_resource::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_for_resource::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: RoleAssignmentListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_for_resource::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_for_resource::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_for_resource::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_for_resource {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_for_resource_group(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
filter: Option<&str>,
subscription_id: &str,
) -> std::result::Result<RoleAssignmentListResult, list_for_resource_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Authorization/roleAssignments",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(list_for_resource_group::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_for_resource_group::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_for_resource_group::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_for_resource_group::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: RoleAssignmentListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_for_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_for_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_for_resource_group::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_for_resource_group {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
scope: &str,
role_assignment_name: &str,
) -> std::result::Result<RoleAssignment, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/{}/providers/Microsoft.Authorization/roleAssignments/{}",
operation_config.base_path(),
scope,
role_assignment_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: RoleAssignment =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create(
operation_config: &crate::OperationConfig,
scope: &str,
role_assignment_name: &str,
parameters: &RoleAssignmentCreateParameters,
) -> std::result::Result<RoleAssignment, create::Error> |
pub mod create {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
scope: &str,
role_assignment_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/{}/providers/Microsoft.Authorization/roleAssignments/{}",
operation_config.base_path(),
scope,
role_assignment_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: RoleAssignment =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(delete::Response::Ok200(rsp_value))
}
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Ok200(RoleAssignment),
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_by_id(
operation_config: &crate::OperationConfig,
role_assignment_id: &str,
) -> std::result::Result<RoleAssignment, get_by_id::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/{}", operation_config.base_path(), role_assignment_id);
let mut url = url::Url::parse(url_str).map_err(get_by_id::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_by_id::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_by_id::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_by_id::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: RoleAssignment =
serde_json::from_slice(rsp_body).map_err(|source| get_by_id::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get_by_id::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_by_id::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_by_id {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_by_id(
operation_config: &crate::OperationConfig,
role_assignment_id: &str,
parameters: &RoleAssignmentCreateParameters,
) -> std::result::Result<RoleAssignment, create_by_id::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/{}", operation_config.base_path(), role_assignment_id);
let mut url = url::Url::parse(url_str).map_err(create_by_id::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_by_id::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(parameters).map_err(create_by_id::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_by_id::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_by_id::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: RoleAssignment =
serde_json::from_slice(rsp_body).map_err(|source| create_by_id::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| create_by_id::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_by_id::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_by_id {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete_by_id(
operation_config: &crate::OperationConfig,
role_assignment_id: &str,
) -> std::result::Result<delete_by_id::Response, delete_by_id::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/{}", operation_config.base_path(), role_assignment_id);
let mut url = url::Url::parse(url_str).map_err(delete_by_id::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete_by_id::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete_by_id::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(delete_by_id::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: RoleAssignment =
serde_json::from_slice(rsp_body).map_err(|source| delete_by_id::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(delete_by_id::Response::Ok200(rsp_value))
}
http::StatusCode::NO_CONTENT => Ok(delete_by_id::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete_by_id::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete_by_id::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete_by_id {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Ok200(RoleAssignment),
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list(
operation_config: &crate::OperationConfig,
filter: Option<&str>,
subscription_id: &str,
) -> std::result::Result<RoleAssignmentListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Authorization/roleAssignments",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: RoleAssignmentListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_for_scope(
operation_config: &crate::OperationConfig,
scope: &str,
filter: Option<&str>,
) -> std::result::Result<RoleAssignmentListResult, list_for_scope::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/{}/providers/Microsoft.Authorization/roleAssignments",
operation_config.base_path(),
scope
);
let mut url = url::Url::parse(url_str).map_err(list_for_scope::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_for_scope::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_for_scope::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_for_scope::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: RoleAssignmentListResult =
serde_json::from_slice(rsp_body).map_err(|source| list_for_scope::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list_for_scope::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_for_scope::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_for_scope {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
| {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/{}/providers/Microsoft.Authorization/roleAssignments/{}",
operation_config.base_path(),
scope,
role_assignment_name
);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(parameters).map_err(create::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: RoleAssignment =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
} |
stuckAnchor.controller.ts | import {IAnchorEntity} from './../core/entities/interfaces/IAnchorEntity';
export class StuckAnchorController { | * @input anchor - the anchor passed to us
*/
public anchor: IAnchorEntity;
public constructor() {
this.anchor.subscribe((anchor: IAnchorEntity) => this.onAnchorChangedCallback(anchor));
}
private onAnchorChangedCallback(anchor: IAnchorEntity) {
let bbox = (d3.select('.stuck-anchor').select('text')[0][0] as any).getBBox();
this.anchor.updateComponents({
width: bbox.width,
height: bbox.height
});
}
} | public static $inject: string[] = [];
/** |
frame.py | """
DataFrame
---------
An efficient 2D container for potentially mixed-type time series or other
labeled data series.
Similar to its R counterpart, data.frame, except providing automatic data
alignment and a host of useful data manipulation methods having to do with the
labeling information
"""
from __future__ import division
# pylint: disable=E1101,E1103
# pylint: disable=W0212,W0231,W0703,W0622
import functools
import collections
import itertools
import sys
import types
import warnings
from numpy import nan as NA
import numpy as np
import numpy.ma as ma
from pandas.core.common import (isnull, notnull, PandasError, _try_sort,
_default_index, _maybe_upcast, is_sequence,
_infer_dtype_from_scalar, _values_from_object,
is_list_like, _get_dtype, _maybe_box_datetimelike,
is_categorical_dtype, is_object_dtype, _possibly_infer_to_datetimelike)
from pandas.core.generic import NDFrame, _shared_docs
from pandas.core.index import Index, MultiIndex, _ensure_index
from pandas.core.indexing import (maybe_droplevels,
convert_to_index_sliceable,
check_bool_indexer)
from pandas.core.internals import (BlockManager,
create_block_manager_from_arrays,
create_block_manager_from_blocks)
from pandas.core.series import Series
from pandas.core.categorical import Categorical
import pandas.computation.expressions as expressions
from pandas.computation.eval import eval as _eval
from numpy import percentile as _quantile
from pandas.compat import(range, zip, lrange, lmap, lzip, StringIO, u,
OrderedDict, raise_with_traceback)
from pandas import compat
from pandas.sparse.array import SparseArray
from pandas.util.decorators import deprecate, Appender, Substitution, \
deprecate_kwarg
from pandas.tseries.period import PeriodIndex
from pandas.tseries.index import DatetimeIndex
import pandas.core.algorithms as algos
import pandas.core.common as com
import pandas.core.format as fmt
import pandas.core.nanops as nanops
import pandas.core.ops as ops
import pandas.lib as lib
import pandas.algos as _algos
from pandas.core.config import get_option
#----------------------------------------------------------------------
# Docstring templates
_shared_doc_kwargs = dict(axes='index, columns', klass='DataFrame',
axes_single_arg="{0,1,'index','columns'}")
_numeric_only_doc = """numeric_only : boolean, default None
Include only float, int, boolean data. If None, will attempt to use
everything, then use only numeric data
"""
_merge_doc = """
Merge DataFrame objects by performing a database-style join operation by
columns or indexes.
If joining columns on columns, the DataFrame indexes *will be
ignored*. Otherwise if joining indexes on indexes or indexes on a column or
columns, the index will be passed on.
Parameters
----------%s
right : DataFrame
how : {'left', 'right', 'outer', 'inner'}, default 'inner'
* left: use only keys from left frame (SQL: left outer join)
* right: use only keys from right frame (SQL: right outer join)
* outer: use union of keys from both frames (SQL: full outer join)
* inner: use intersection of keys from both frames (SQL: inner join)
on : label or list
Field names to join on. Must be found in both DataFrames. If on is
None and not merging on indexes, then it merges on the intersection of
the columns by default.
left_on : label or list, or array-like
Field names to join on in left DataFrame. Can be a vector or list of
vectors of the length of the DataFrame to use a particular vector as
the join key instead of columns
right_on : label or list, or array-like
Field names to join on in right DataFrame or vector/list of vectors per
left_on docs
left_index : boolean, default False
Use the index from the left DataFrame as the join key(s). If it is a
MultiIndex, the number of keys in the other DataFrame (either the index
or a number of columns) must match the number of levels
right_index : boolean, default False
Use the index from the right DataFrame as the join key. Same caveats as
left_index
sort : boolean, default False
Sort the join keys lexicographically in the result DataFrame
suffixes : 2-length sequence (tuple, list, ...)
Suffix to apply to overlapping column names in the left and right
side, respectively
copy : boolean, default True
If False, do not copy data unnecessarily
Examples
--------
>>> A >>> B
lkey value rkey value
0 foo 1 0 foo 5
1 bar 2 1 bar 6
2 baz 3 2 qux 7
3 foo 4 3 bar 8
>>> merge(A, B, left_on='lkey', right_on='rkey', how='outer')
lkey value_x rkey value_y
0 foo 1 foo 5
1 foo 4 foo 5
2 bar 2 bar 6
3 bar 2 bar 8
4 baz 3 NaN NaN
5 NaN NaN qux 7
Returns
-------
merged : DataFrame
    The output type will be the same as 'left', if it is a subclass
of DataFrame.
"""
#----------------------------------------------------------------------
# DataFrame class
class DataFrame(NDFrame):
""" Two-dimensional size-mutable, potentially heterogeneous tabular data
structure with labeled axes (rows and columns). Arithmetic operations
align on both row and column labels. Can be thought of as a dict-like
    container for Series objects. The primary pandas data structure.
Parameters
----------
data : numpy ndarray (structured or homogeneous), dict, or DataFrame
Dict can contain Series, arrays, constants, or list-like objects
index : Index or array-like
Index to use for resulting frame. Will default to np.arange(n) if
no indexing information part of input data and no index provided
columns : Index or array-like
Column labels to use for resulting frame. Will default to
np.arange(n) if no column labels are provided
dtype : dtype, default None
Data type to force, otherwise infer
copy : boolean, default False
Copy data from inputs. Only affects DataFrame / 2d ndarray input
Examples
--------
>>> d = {'col1': ts1, 'col2': ts2}
>>> df = DataFrame(data=d, index=index)
>>> df2 = DataFrame(np.random.randn(10, 5))
>>> df3 = DataFrame(np.random.randn(10, 5),
... columns=['a', 'b', 'c', 'd', 'e'])
See also
--------
DataFrame.from_records : constructor from tuples, also record arrays
DataFrame.from_dict : from dicts of Series, arrays, or dicts
DataFrame.from_csv : from CSV files
DataFrame.from_items : from sequence of (key, value) pairs
pandas.read_csv, pandas.read_table, pandas.read_clipboard
"""
_auto_consolidate = True
@property
def _constructor(self):
return DataFrame
_constructor_sliced = Series
def __init__(self, data=None, index=None, columns=None, dtype=None,
copy=False):
if data is None:
data = {}
if dtype is not None:
dtype = self._validate_dtype(dtype)
if isinstance(data, DataFrame):
data = data._data
if isinstance(data, BlockManager):
mgr = self._init_mgr(data, axes=dict(index=index, columns=columns),
dtype=dtype, copy=copy)
elif isinstance(data, dict):
mgr = self._init_dict(data, index, columns, dtype=dtype)
elif isinstance(data, ma.MaskedArray):
import numpy.ma.mrecords as mrecords
# masked recarray
if isinstance(data, mrecords.MaskedRecords):
mgr = _masked_rec_array_to_mgr(data, index, columns, dtype,
copy)
# a masked array
else:
mask = ma.getmaskarray(data)
if mask.any():
data, fill_value = _maybe_upcast(data, copy=True)
data[mask] = fill_value
else:
data = data.copy()
mgr = self._init_ndarray(data, index, columns, dtype=dtype,
copy=copy)
elif isinstance(data, (np.ndarray, Series, Index)):
if data.dtype.names:
data_columns = list(data.dtype.names)
data = dict((k, data[k]) for k in data_columns)
if columns is None:
columns = data_columns
mgr = self._init_dict(data, index, columns, dtype=dtype)
elif getattr(data, 'name', None):
mgr = self._init_dict({data.name: data}, index, columns,
dtype=dtype)
else:
mgr = self._init_ndarray(data, index, columns, dtype=dtype,
copy=copy)
elif isinstance(data, (list, types.GeneratorType)):
if isinstance(data, types.GeneratorType):
data = list(data)
if len(data) > 0:
if is_list_like(data[0]) and getattr(data[0], 'ndim', 1) == 1:
arrays, columns = _to_arrays(data, columns, dtype=dtype)
columns = _ensure_index(columns)
# set the index
if index is None:
if isinstance(data[0], Series):
index = _get_names_from_index(data)
elif isinstance(data[0], Categorical):
index = _default_index(len(data[0]))
else:
index = _default_index(len(data))
mgr = _arrays_to_mgr(arrays, columns, index, columns,
dtype=dtype)
else:
mgr = self._init_ndarray(data, index, columns, dtype=dtype,
copy=copy)
else:
mgr = self._init_ndarray(data, index, columns, dtype=dtype,
copy=copy)
elif isinstance(data, collections.Iterator):
raise TypeError("data argument can't be an iterator")
else:
try:
arr = np.array(data, dtype=dtype, copy=copy)
except (ValueError, TypeError) as e:
exc = TypeError('DataFrame constructor called with '
'incompatible data and dtype: %s' % e)
raise_with_traceback(exc)
if arr.ndim == 0 and index is not None and columns is not None:
if isinstance(data, compat.string_types) and dtype is None:
dtype = np.object_
if dtype is None:
dtype, data = _infer_dtype_from_scalar(data)
values = np.empty((len(index), len(columns)), dtype=dtype)
values.fill(data)
mgr = self._init_ndarray(values, index, columns, dtype=dtype,
copy=False)
else:
raise PandasError('DataFrame constructor not properly called!')
NDFrame.__init__(self, mgr, fastpath=True)
def _init_dict(self, data, index, columns, dtype=None):
"""
Segregate Series based on type and coerce into matrices.
Needs to handle a lot of exceptional cases.
"""
if columns is not None:
columns = _ensure_index(columns)
# prefilter if columns passed
data = dict((k, v) for k, v in compat.iteritems(data)
if k in columns)
if index is None:
index = extract_index(list(data.values()))
else:
index = _ensure_index(index)
arrays = []
data_names = []
for k in columns:
if k not in data:
# no obvious "empty" int column
if dtype is not None and issubclass(dtype.type,
np.integer):
continue
if dtype is None:
# 1783
v = np.empty(len(index), dtype=object)
else:
v = np.empty(len(index), dtype=dtype)
v.fill(NA)
else:
v = data[k]
data_names.append(k)
arrays.append(v)
else:
keys = list(data.keys())
if not isinstance(data, OrderedDict):
keys = _try_sort(keys)
columns = data_names = Index(keys)
arrays = [data[k] for k in keys]
return _arrays_to_mgr(arrays, data_names, index, columns,
dtype=dtype)
def _init_ndarray(self, values, index, columns, dtype=None,
copy=False):
# input must be a ndarray, list, Series, index
if isinstance(values, Series):
if columns is None:
if values.name is not None:
columns = [values.name]
if index is None:
index = values.index
else:
values = values.reindex(index)
# zero len case (GH #2234)
if not len(values) and columns is not None and len(columns):
values = np.empty((0, 1), dtype=object)
# helper to create the axes as indexes
def _get_axes(N, K, index=index, columns=columns):
# return axes or defaults
if index is None:
index = _default_index(N)
else:
index = _ensure_index(index)
if columns is None:
columns = _default_index(K)
else:
columns = _ensure_index(columns)
return index, columns
# we could have a categorical type passed or coerced to 'category'
# recast this to an _arrays_to_mgr
if is_categorical_dtype(getattr(values, 'dtype', None)) or is_categorical_dtype(dtype):
if not hasattr(values, 'dtype'):
values = _prep_ndarray(values, copy=copy)
values = values.ravel()
elif copy:
values = values.copy()
index, columns = _get_axes(len(values), 1)
return _arrays_to_mgr([values], columns, index, columns,
dtype=dtype)
# by definition an array here
# the dtypes will be coerced to a single dtype
values = _prep_ndarray(values, copy=copy)
if dtype is not None:
if values.dtype != dtype:
try:
values = values.astype(dtype)
except Exception as orig:
e = ValueError("failed to cast to '%s' (Exception was: %s)"
% (dtype, orig))
raise_with_traceback(e)
index, columns = _get_axes(*values.shape)
values = values.T
# if we don't have a dtype specified, then try to convert objects
# on the entire block; this is to convert if we have datetimelike's
# embedded in an object type
if dtype is None and is_object_dtype(values):
values = _possibly_infer_to_datetimelike(values)
return create_block_manager_from_blocks([values], [columns, index])
@property
def axes(self):
return [self.index, self.columns]
@property
def shape(self):
return (len(self.index), len(self.columns))
def _repr_fits_vertical_(self):
"""
Check length against max_rows.
"""
max_rows = get_option("display.max_rows")
return len(self) <= max_rows
def _repr_fits_horizontal_(self, ignore_width=False):
"""
Check if full repr fits in horizontal boundaries imposed by the display
options width and max_columns. In case of a non-interactive session, no
boundaries apply.
ignore_width is here so ipynb+HTML output can behave the way
users expect. display.max_columns remains in effect.
GH3541, GH3573
"""
width, height = fmt.get_console_size()
max_columns = get_option("display.max_columns")
nb_columns = len(self.columns)
# exceed max columns
if ((max_columns and nb_columns > max_columns) or
((not ignore_width) and width and nb_columns > (width // 2))):
return False
if (ignore_width # used by repr_html under IPython notebook
# scripts ignore terminal dims
or not com.in_interactive_session()):
return True
if (get_option('display.width') is not None or
com.in_ipython_frontend()):
# check at least the column row for excessive width
max_rows = 1
else:
max_rows = get_option("display.max_rows")
# when auto-detecting, so width=None and not in ipython front end
# check whether repr fits horizontal by actually checking
# the width of the rendered repr
buf = StringIO()
# only care about the stuff we'll actually print out
# and to_string on entire frame may be expensive
d = self
if max_rows is not None:  # rows are capped
# only render the first max_rows rows
d = d.iloc[:min(max_rows, len(d))]
else:
return True
d.to_string(buf=buf)
value = buf.getvalue()
repr_width = max([len(line) for line in value.split('\n')])
return repr_width < width
def _info_repr(self):
"""True if the repr should show the info view."""
info_repr_option = (get_option("display.large_repr") == "info")
return info_repr_option and not (
self._repr_fits_horizontal_() and self._repr_fits_vertical_()
)
def __unicode__(self):
"""
Return a string representation for a particular DataFrame
Invoked by unicode(df) in py2 only. Yields a unicode string in both
py2/py3.
"""
buf = StringIO(u(""))
if self._info_repr():
self.info(buf=buf)
return buf.getvalue()
max_rows = get_option("display.max_rows")
max_cols = get_option("display.max_columns")
show_dimensions = get_option("display.show_dimensions")
if get_option("display.expand_frame_repr"):
width, _ = fmt.get_console_size()
else:
width = None
self.to_string(buf=buf, max_rows=max_rows, max_cols=max_cols,
line_width=width, show_dimensions=show_dimensions)
return buf.getvalue()
def _repr_html_(self):
"""
Return an HTML representation for a particular DataFrame.
Mainly for IPython notebook.
"""
# qtconsole doesn't report its line width, and also
# behaves badly when outputting an HTML table
# that doesn't fit the window, so disable it.
# XXX: In IPython 3.x and above, the Qt console will not attempt to
# display HTML, so this check can be removed when support for IPython 2.x
# is no longer needed.
if com.in_qtconsole():
# 'HTML output is disabled in QtConsole'
return None
if self._info_repr():
buf = StringIO(u(""))
self.info(buf=buf)
# need to escape the <class>, should be the first line.
val = buf.getvalue().replace('<', r'&lt;', 1).replace('>',
r'&gt;', 1)
return '<pre>' + val + '</pre>'
if get_option("display.notebook_repr_html"):
max_rows = get_option("display.max_rows")
max_cols = get_option("display.max_columns")
show_dimensions = get_option("display.show_dimensions")
return ('<div style="max-height:1000px;'
'max-width:1500px;overflow:auto;">\n' +
self.to_html(max_rows=max_rows, max_cols=max_cols,
show_dimensions=show_dimensions) + '\n</div>')
else:
return None
def iteritems(self):
"""Iterator over (column, series) pairs"""
if self.columns.is_unique and hasattr(self, '_item_cache'):
for k in self.columns:
yield k, self._get_item_cache(k)
else:
for i, k in enumerate(self.columns):
yield k, self.icol(i)
def iterrows(self):
"""
Iterate over rows of DataFrame as (index, Series) pairs.
Notes
-----
* ``iterrows`` does **not** preserve dtypes across the rows (dtypes
are preserved across columns for DataFrames). For example,
>>> df = DataFrame([[1, 1.0]], columns=['x', 'y'])
>>> row = next(df.iterrows())[1]
>>> print(row['x'].dtype)
float64
>>> print(df['x'].dtype)
int64
Returns
-------
it : generator
A generator that iterates over the rows of the frame.
"""
columns = self.columns
for k, v in zip(self.index, self.values):
s = Series(v, index=columns, name=k)
yield k, s
def itertuples(self, index=True):
"""
Iterate over rows of DataFrame as tuples, with index value
as first element of the tuple
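Examples
--------
A small illustrative sketch (added for clarity; the plain-tuple
output below is what the zip-based implementation produces):
>>> df = DataFrame({'a': [1, 2]}, index=['x', 'y'])
>>> list(df.itertuples())
[('x', 1), ('y', 2)]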
"""
arrays = []
if index:
arrays.append(self.index)
# use integer indexing because of possible duplicate column names
arrays.extend(self.iloc[:, k] for k in range(len(self.columns)))
return zip(*arrays)
if compat.PY3: # pragma: no cover
items = iteritems
def __len__(self):
"""Returns length of info axis, but here we use the index """
return len(self.index)
def dot(self, other):
"""
Matrix multiplication with DataFrame or Series objects
Parameters
----------
other : DataFrame or Series
Returns
-------
dot_product : DataFrame or Series
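Examples
--------
A minimal sketch; the frame's columns are aligned against the other
object's index before multiplying:
>>> left = DataFrame([[1, 2], [3, 4]], columns=['a', 'b'])
>>> right = Series([10, 20], index=['a', 'b'])
>>> left.dot(right)
0     50
1    110
dtype: int64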
"""
if isinstance(other, (Series, DataFrame)):
common = self.columns.union(other.index)
if (len(common) > len(self.columns) or
len(common) > len(other.index)):
raise ValueError('matrices are not aligned')
left = self.reindex(columns=common, copy=False)
right = other.reindex(index=common, copy=False)
lvals = left.values
rvals = right.values
else:
left = self
lvals = self.values
rvals = np.asarray(other)
if lvals.shape[1] != rvals.shape[0]:
raise ValueError('Dot product shape mismatch, %s vs %s' %
(lvals.shape, rvals.shape))
if isinstance(other, DataFrame):
return self._constructor(np.dot(lvals, rvals),
index=left.index,
columns=other.columns)
elif isinstance(other, Series):
return Series(np.dot(lvals, rvals), index=left.index)
elif isinstance(rvals, (np.ndarray, Index)):
result = np.dot(lvals, rvals)
if result.ndim == 2:
return self._constructor(result, index=left.index)
else:
return Series(result, index=left.index)
else: # pragma: no cover
raise TypeError('unsupported type: %s' % type(other))
#----------------------------------------------------------------------
# IO methods (to / from other formats)
@classmethod
def from_dict(cls, data, orient='columns', dtype=None):
"""
Construct DataFrame from dict of array-like or dicts
Parameters
----------
data : dict
{field : array-like} or {field : dict}
orient : {'columns', 'index'}, default 'columns'
The "orientation" of the data. If the keys of the passed dict
should be the columns of the resulting DataFrame, pass 'columns'
(default). Otherwise if the keys should be rows, pass 'index'.
Returns
-------
DataFrame
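Examples
--------
A brief sketch; column order may vary since plain dicts are
unordered:
>>> d = {'row1': {'a': 1, 'b': 2}, 'row2': {'a': 3, 'b': 4}}
>>> DataFrame.from_dict(d, orient='index')
      a  b
row1  1  2
row2  3  4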
"""
index, columns = None, None
orient = orient.lower()
if orient == 'index':
if len(data) > 0:
# TODO speed up Series case
if isinstance(list(data.values())[0], (Series, dict)):
data = _from_nested_dict(data)
else:
data, index = list(data.values()), list(data.keys())
elif orient != 'columns': # pragma: no cover
raise ValueError('only recognize index or columns for orient')
return cls(data, index=index, columns=columns, dtype=dtype)
@deprecate_kwarg(old_arg_name='outtype', new_arg_name='orient')
def to_dict(self, orient='dict'):
"""Convert DataFrame to dictionary.
Parameters
----------
orient : str {'dict', 'list', 'series', 'split', 'records'}
Determines the type of the values of the dictionary.
- dict (default) : dict like {column -> {index -> value}}
- list : dict like {column -> [values]}
- series : dict like {column -> Series(values)}
- split : dict like
{index -> [index], columns -> [columns], data -> [values]}
- records : list like
[{column -> value}, ... , {column -> value}]
Abbreviations are allowed. `s` indicates `series` and `sp`
indicates `split`.
Returns
-------
result : dict like {column -> {index -> value}}
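Examples
--------
A minimal sketch of the default and 'list' orientations:
>>> df = DataFrame({'a': [1, 2]})
>>> df.to_dict()
{'a': {0: 1, 1: 2}}
>>> df.to_dict(orient='list')
{'a': [1, 2]}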
"""
if not self.columns.is_unique:
warnings.warn("DataFrame columns are not unique, some "
"columns will be omitted.", UserWarning)
if orient.lower().startswith('d'):
return dict((k, v.to_dict()) for k, v in compat.iteritems(self))
elif orient.lower().startswith('l'):
return dict((k, v.tolist()) for k, v in compat.iteritems(self))
elif orient.lower().startswith('sp'):
return {'index': self.index.tolist(),
'columns': self.columns.tolist(),
'data': self.values.tolist()}
elif orient.lower().startswith('s'):
return dict((k, v) for k, v in compat.iteritems(self))
elif orient.lower().startswith('r'):
return [dict((k, v) for k, v in zip(self.columns, row))
for row in self.values]
else:
raise ValueError("orient '%s' not understood" % orient)
def to_gbq(self, destination_table, project_id=None, chunksize=10000,
verbose=True, reauth=False):
"""Write a DataFrame to a Google BigQuery table.
THIS IS AN EXPERIMENTAL LIBRARY
If the table exists, the dataframe will be written to the table using
the defined table schema and column types. For simplicity, this method
uses the Google BigQuery streaming API. The to_gbq method chunks data
into a default chunk size of 10,000. Failures return the complete error
response which can be quite long depending on the size of the insert.
There are several important limitations of the Google streaming API
which are detailed at:
https://developers.google.com/bigquery/streaming-data-into-bigquery.
Parameters
----------
destination_table : string
Name of table to be written, in the form 'dataset.tablename'
project_id : str
Google BigQuery Account project ID.
chunksize : int (default 10000)
Number of rows to be inserted in each chunk from the dataframe.
verbose : boolean (default True)
Show percentage complete
reauth : boolean (default False)
Force Google BigQuery to reauthenticate the user. This is useful
if multiple accounts are used.
"""
from pandas.io import gbq
return gbq.to_gbq(self, destination_table, project_id=project_id,
chunksize=chunksize, verbose=verbose,
reauth=reauth)
@classmethod
def from_records(cls, data, index=None, exclude=None, columns=None,
coerce_float=False, nrows=None):
"""
Convert structured or record ndarray to DataFrame
Parameters
----------
data : ndarray (structured dtype), list of tuples, dict, or DataFrame
index : string, list of fields, array-like
Field of array to use as the index, alternately a specific set of
input labels to use
exclude : sequence, default None
Columns or fields to exclude
columns : sequence, default None
Column names to use. If the passed data do not have names
associated with them, this argument provides names for the
columns. Otherwise this argument indicates the order of the columns
in the result (any names not found in the data will become all-NA
columns)
coerce_float : boolean, default False
Attempt to convert values of non-string, non-numeric objects (like
decimal.Decimal) to floating point, useful for SQL result sets
Returns
-------
df : DataFrame
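Examples
--------
A short sketch with a structured ndarray; field names become
columns:
>>> import numpy as np
>>> arr = np.array([(1, 2.0), (3, 4.0)],
...                dtype=[('x', 'i4'), ('y', 'f4')])
>>> df = DataFrame.from_records(arr, index='x')
>>> list(df.columns)
['y']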
"""
# Make a copy of the input columns so we can modify it
if columns is not None:
columns = _ensure_index(columns)
if com.is_iterator(data):
if nrows == 0:
return cls()
try:
# next() works on both py2 and py3 iterators
first_row = next(data)
except StopIteration:
return cls(index=index, columns=columns)
dtype = None
if hasattr(first_row, 'dtype') and first_row.dtype.names:
dtype = first_row.dtype
values = [first_row]
if nrows is None:
values += data
else:
values.extend(itertools.islice(data, nrows - 1))
if dtype is not None:
data = np.array(values, dtype=dtype)
else:
data = values
if isinstance(data, dict):
if columns is None:
columns = arr_columns = _ensure_index(sorted(data))
arrays = [data[k] for k in columns]
else:
arrays = []
arr_columns = []
for k, v in compat.iteritems(data):
if k in columns:
arr_columns.append(k)
arrays.append(v)
arrays, arr_columns = _reorder_arrays(arrays, arr_columns,
columns)
elif isinstance(data, (np.ndarray, DataFrame)):
arrays, columns = _to_arrays(data, columns)
if columns is not None:
columns = _ensure_index(columns)
arr_columns = columns
else:
arrays, arr_columns = _to_arrays(data, columns,
coerce_float=coerce_float)
arr_columns = _ensure_index(arr_columns)
if columns is not None:
columns = _ensure_index(columns)
else:
columns = arr_columns
if exclude is None:
exclude = set()
else:
exclude = set(exclude)
result_index = None
if index is not None:
if (isinstance(index, compat.string_types) or
not hasattr(index, "__iter__")):
i = columns.get_loc(index)
exclude.add(index)
if len(arrays) > 0:
result_index = Index(arrays[i], name=index)
else:
result_index = Index([], name=index)
else:
try:
to_remove = [arr_columns.get_loc(field) for field in index]
result_index = MultiIndex.from_arrays(
[arrays[i] for i in to_remove], names=index)
exclude.update(index)
except Exception:
result_index = index
if exclude:  # non-empty, regardless of label truthiness
arr_exclude = [x for x in exclude if x in arr_columns]
to_remove = [arr_columns.get_loc(col) for col in arr_exclude]
arrays = [v for i, v in enumerate(arrays) if i not in to_remove]
arr_columns = arr_columns.drop(arr_exclude)
columns = columns.drop(exclude)
mgr = _arrays_to_mgr(arrays, arr_columns, result_index,
columns)
return cls(mgr)
def to_records(self, index=True, convert_datetime64=True):
"""
Convert DataFrame to record array. Index will be put in the
'index' field of the record array if requested
Parameters
----------
index : boolean, default True
Include index in resulting record array, stored in 'index' field
convert_datetime64 : boolean, default True
Whether to convert the index to datetime.datetime if it is a
DatetimeIndex
Returns
-------
y : recarray
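Examples
--------
A quick sketch; the index becomes the first field of the result:
>>> df = DataFrame({'a': [1, 2]}, index=['x', 'y'])
>>> rec = df.to_records()
>>> rec.dtype.names
('index', 'a')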
"""
if index:
if com.is_datetime64_dtype(self.index) and convert_datetime64:
ix_vals = [self.index.to_pydatetime()]
else:
if isinstance(self.index, MultiIndex):
# array of tuples to numpy cols. copy copy copy
ix_vals = lmap(np.array, zip(*self.index.values))
else:
ix_vals = [self.index.values]
arrays = ix_vals + [self[c].get_values() for c in self.columns]
count = 0
index_names = list(self.index.names)
if isinstance(self.index, MultiIndex):
for i, n in enumerate(index_names):
if n is None:
index_names[i] = 'level_%d' % count
count += 1
elif index_names[0] is None:
index_names = ['index']
names = index_names + lmap(str, self.columns)
else:
arrays = [self[c].get_values() for c in self.columns]
names = lmap(str, self.columns)
dtype = np.dtype([(x, v.dtype) for x, v in zip(names, arrays)])
return np.rec.fromarrays(arrays, dtype=dtype, names=names)
@classmethod
def from_items(cls, items, columns=None, orient='columns'):
"""
Convert (key, value) pairs to DataFrame. The keys will be the axis
index (usually the columns, but depends on the specified
orientation). The values should be arrays or Series.
Parameters
----------
items : sequence of (key, value) pairs
Values should be arrays or Series.
columns : sequence of column labels, optional
Must be passed if orient='index'.
orient : {'columns', 'index'}, default 'columns'
The "orientation" of the data. If the keys of the
input correspond to column labels, pass 'columns'
(default). Otherwise if the keys correspond to the index,
pass 'index'.
Returns
-------
frame : DataFrame
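Examples
--------
A minimal sketch with the default orientation:
>>> DataFrame.from_items([('a', [1, 2]), ('b', [3, 4])])
   a  b
0  1  3
1  2  4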
"""
keys, values = lzip(*items)
if orient == 'columns':
if columns is not None:
columns = _ensure_index(columns)
idict = dict(items)
if len(idict) < len(items):
if not columns.equals(_ensure_index(keys)):
raise ValueError('With non-unique item names, passed '
'columns must be identical')
arrays = values
else:
arrays = [idict[k] for k in columns if k in idict]
else:
columns = _ensure_index(keys)
arrays = values
return cls._from_arrays(arrays, columns, None)
elif orient == 'index':
if columns is None:
raise TypeError("Must pass columns with orient='index'")
keys = _ensure_index(keys)
arr = np.array(values, dtype=object).T
data = [lib.maybe_convert_objects(v) for v in arr]
return cls._from_arrays(data, columns, keys)
else: # pragma: no cover
raise ValueError("'orient' must be either 'columns' or 'index'")
@classmethod
def _from_arrays(cls, arrays, columns, index, dtype=None):
mgr = _arrays_to_mgr(arrays, columns, index, columns, dtype=dtype)
return cls(mgr)
@classmethod
def from_csv(cls, path, header=0, sep=',', index_col=0,
parse_dates=True, encoding=None, tupleize_cols=False,
infer_datetime_format=False):
"""
Read delimited file into DataFrame
Parameters
----------
path : string file path or file handle / StringIO
header : int, default 0
Row to use as header (skip prior rows)
sep : string, default ','
Field delimiter
index_col : int or sequence, default 0
Column to use for index. If a sequence is given, a MultiIndex
is used. Different default from read_table
parse_dates : boolean, default True
Parse dates. Different default from read_table
tupleize_cols : boolean, default False
write multi_index columns as a list of tuples (if True) or in the
new, expanded format (if False)
infer_datetime_format : boolean, default False
If True and `parse_dates` is True for a column, try to infer the
datetime format based on the first datetime string. If the format
can be inferred, there often will be a large parsing speed-up.
Notes
-----
Preferable to use read_table for most general purposes, but from_csv
makes for an easy roundtrip to and from a file, especially with a
DataFrame of time series data
Returns
-------
y : DataFrame
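Examples
--------
A round-trip sketch (the file path is hypothetical):
>>> df.to_csv('data.csv')  # doctest: +SKIP
>>> df2 = DataFrame.from_csv('data.csv')  # doctest: +SKIP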
"""
from pandas.io.parsers import read_table
return read_table(path, header=header, sep=sep,
parse_dates=parse_dates, index_col=index_col,
encoding=encoding, tupleize_cols=tupleize_cols,
infer_datetime_format=infer_datetime_format)
def to_sparse(self, fill_value=None, kind='block'):
"""
Convert to SparseDataFrame
Parameters
----------
fill_value : float, default NaN
kind : {'block', 'integer'}
Returns
-------
y : SparseDataFrame
"""
from pandas.core.sparse import SparseDataFrame
return SparseDataFrame(self._series, index=self.index,
default_kind=kind,
default_fill_value=fill_value)
def to_panel(self):
"""
Transform long (stacked) format (DataFrame) into wide (3D, Panel)
format.
Currently the index of the DataFrame must be a 2-level MultiIndex. This
may be generalized later
Returns
-------
panel : Panel
"""
from pandas.core.panel import Panel
# only support this kind for now
if (not isinstance(self.index, MultiIndex) or # pragma: no cover
len(self.index.levels) != 2):
raise NotImplementedError('Only 2-level MultiIndex are supported.')
if not self.index.is_unique:
raise ValueError("Can't convert non-uniquely indexed "
"DataFrame to Panel")
self._consolidate_inplace()
# minor axis must be sorted
if self.index.lexsort_depth < 2:
selfsorted = self.sortlevel(0)
else:
selfsorted = self
major_axis, minor_axis = selfsorted.index.levels
major_labels, minor_labels = selfsorted.index.labels
shape = len(major_axis), len(minor_axis)
# preserve names, if any
major_axis = major_axis.copy()
major_axis.name = self.index.names[0]
minor_axis = minor_axis.copy()
minor_axis.name = self.index.names[1]
# create new axes
new_axes = [selfsorted.columns, major_axis, minor_axis]
# create new manager
new_mgr = selfsorted._data.reshape_nd(axes=new_axes,
labels=[major_labels, minor_labels],
shape=shape,
ref_items=selfsorted.columns)
return Panel(new_mgr)
to_wide = deprecate('to_wide', to_panel)
def to_csv(self, path_or_buf=None, sep=",", na_rep='', float_format=None,
columns=None, header=True, index=True, index_label=None,
mode='w', encoding=None, quoting=None,
quotechar='"', line_terminator='\n', chunksize=None,
tupleize_cols=False, date_format=None, doublequote=True,
escapechar=None, decimal='.', **kwds):
r"""Write DataFrame to a comma-separated values (csv) file
Parameters
----------
path_or_buf : string or file handle, default None
File path or object, if None is provided the result is returned as
a string.
sep : character, default ","
Field delimiter for the output file.
na_rep : string, default ''
Missing data representation
float_format : string, default None
Format string for floating point numbers
columns : sequence, optional
Columns to write
header : boolean or list of string, default True
Write out column names. If a list of strings is given it is assumed
to be aliases for the column names
index : boolean, default True
Write row names (index)
index_label : string or sequence, or False, default None
Column label for index column(s) if desired. If None is given, and
`header` and `index` are True, then the index names are used. A
sequence should be given if the DataFrame uses MultiIndex. If
False do not print fields for index names. Use index_label=False
for easier importing in R
nanRep : None
deprecated, use na_rep
mode : str
Python write mode, default 'w'
encoding : string, optional
A string representing the encoding to use in the output file,
defaults to 'ascii' on Python 2 and 'utf-8' on Python 3.
line_terminator : string, default '\\n'
The newline character or character sequence to use in the output
file
quoting : optional constant from csv module
defaults to csv.QUOTE_MINIMAL
quotechar : string (length 1), default '"'
character used to quote fields
doublequote : boolean, default True
Control quoting of `quotechar` inside a field
escapechar : string (length 1), default None
character used to escape `sep` and `quotechar` when appropriate
chunksize : int or None
rows to write at a time
tupleize_cols : boolean, default False
write multi_index columns as a list of tuples (if True) or in the
new, expanded format (if False)
date_format : string, default None
Format string for datetime objects
decimal : string, default '.'
Character recognized as decimal separator. E.g. use ',' for European data
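Examples
--------
A minimal sketch; when no path is given the CSV text is returned
as a string:
>>> df = DataFrame({'a': [1, 2]})
>>> csv_text = df.to_csv(index=False)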
"""
formatter = fmt.CSVFormatter(self, path_or_buf,
line_terminator=line_terminator,
sep=sep, encoding=encoding,
quoting=quoting, na_rep=na_rep,
float_format=float_format, cols=columns,
header=header, index=index,
index_label=index_label, mode=mode,
chunksize=chunksize, quotechar=quotechar,
engine=kwds.get("engine"),
tupleize_cols=tupleize_cols,
date_format=date_format,
doublequote=doublequote,
escapechar=escapechar,
decimal=decimal)
formatter.save()
if path_or_buf is None:
return formatter.path_or_buf.getvalue()
def to_excel(self, excel_writer, sheet_name='Sheet1', na_rep='',
float_format=None, columns=None, header=True, index=True,
index_label=None, startrow=0, startcol=0, engine=None,
merge_cells=True, encoding=None, inf_rep='inf'):
"""
Write DataFrame to an Excel sheet
Parameters
----------
excel_writer : string or ExcelWriter object
File path or existing ExcelWriter
sheet_name : string, default 'Sheet1'
Name of sheet which will contain DataFrame
na_rep : string, default ''
Missing data representation
float_format : string, default None
Format string for floating point numbers
columns : sequence, optional
Columns to write
header : boolean or list of string, default True
Write out column names. If a list of strings is given it is
assumed to be aliases for the column names
index : boolean, default True
Write row names (index)
index_label : string or sequence, default None
Column label for index column(s) if desired. If None is given, and
`header` and `index` are True, then the index names are used. A
sequence should be given if the DataFrame uses MultiIndex.
startrow : int, default 0
upper left cell row to dump data frame
startcol : int, default 0
upper left cell column to dump data frame
engine : string, default None
write engine to use - you can also set this via the options
``io.excel.xlsx.writer``, ``io.excel.xls.writer``, and
``io.excel.xlsm.writer``.
merge_cells : boolean, default True
Write MultiIndex and Hierarchical Rows as merged cells.
encoding : string, default None
encoding of the resulting excel file. Only necessary for xlwt,
other writers support unicode natively.
inf_rep : string, default 'inf'
Representation for infinity (there is no native representation for
infinity in Excel)
Notes
-----
If passing an existing ExcelWriter object, then the sheet will be added
to the existing workbook. This can be used to save different
DataFrames to one workbook:
>>> writer = ExcelWriter('output.xlsx')
>>> df1.to_excel(writer,'Sheet1')
>>> df2.to_excel(writer,'Sheet2')
>>> writer.save()
"""
from pandas.io.excel import ExcelWriter
need_save = False
if encoding is None:
encoding = 'ascii'
if isinstance(excel_writer, compat.string_types):
excel_writer = ExcelWriter(excel_writer, engine=engine)
need_save = True
formatter = fmt.ExcelFormatter(self,
na_rep=na_rep,
cols=columns,
header=header,
float_format=float_format,
index=index,
index_label=index_label,
merge_cells=merge_cells,
inf_rep=inf_rep)
formatted_cells = formatter.get_formatted_cells()
excel_writer.write_cells(formatted_cells, sheet_name,
startrow=startrow, startcol=startcol)
if need_save:
excel_writer.save()
def to_stata(
self, fname, convert_dates=None, write_index=True, encoding="latin-1",
byteorder=None, time_stamp=None, data_label=None):
"""
Write the DataFrame to a Stata binary dta file
Parameters
----------
fname : file path or buffer
Where to save the dta file.
convert_dates : dict
Dictionary mapping column of datetime types to the stata internal
format that you want to use for the dates. Options are
'tc', 'td', 'tm', 'tw', 'th', 'tq', 'ty'. Column can be either a
number or a name.
encoding : str
Default is latin-1. Note that Stata does not support unicode.
byteorder : str
Can be ">", "<", "little", or "big". The default is None which uses
`sys.byteorder`
Examples
--------
>>> writer = StataWriter('./data_file.dta', data)
>>> writer.write_file()
Or with dates
>>> writer = StataWriter('./date_data_file.dta', data, {2 : 'tw'})
>>> writer.write_file()
"""
from pandas.io.stata import StataWriter
writer = StataWriter(fname, self, convert_dates=convert_dates,
encoding=encoding, byteorder=byteorder,
time_stamp=time_stamp, data_label=data_label,
write_index=write_index)
writer.write_file()
@Appender(fmt.docstring_to_string, indents=1)
def to_string(self, buf=None, columns=None, col_space=None, colSpace=None,
header=True, index=True, na_rep='NaN', formatters=None,
float_format=None, sparsify=None, index_names=True,
justify=None, line_width=None, max_rows=None, max_cols=None,
show_dimensions=False):
"""
Render a DataFrame to a console-friendly tabular output.
"""
if colSpace is not None: # pragma: no cover
warnings.warn("colSpace is deprecated, use col_space",
FutureWarning)
col_space = colSpace
formatter = fmt.DataFrameFormatter(self, buf=buf, columns=columns,
col_space=col_space, na_rep=na_rep,
formatters=formatters,
float_format=float_format,
sparsify=sparsify,
justify=justify,
index_names=index_names,
header=header, index=index,
line_width=line_width,
max_rows=max_rows,
max_cols=max_cols,
show_dimensions=show_dimensions)
formatter.to_string()
if buf is None:
result = formatter.buf.getvalue()
return result
@Appender(fmt.docstring_to_string, indents=1)
def to_html(self, buf=None, columns=None, col_space=None, colSpace=None,
header=True, index=True, na_rep='NaN', formatters=None,
float_format=None, sparsify=None, index_names=True,
justify=None, bold_rows=True, classes=None, escape=True,
max_rows=None, max_cols=None, show_dimensions=False):
"""
Render a DataFrame as an HTML table.
`to_html`-specific options:
bold_rows : boolean, default True
Make the row labels bold in the output
classes : str or list or tuple, default None
CSS class(es) to apply to the resulting html table
escape : boolean, default True
Convert the characters <, >, and & to HTML-safe sequences.
max_rows : int, optional
Maximum number of rows to show before truncating. If None, show
all.
max_cols : int, optional
Maximum number of columns to show before truncating. If None, show
all.
"""
if colSpace is not None: # pragma: no cover
warnings.warn("colSpace is deprecated, use col_space",
FutureWarning)
col_space = colSpace
formatter = fmt.DataFrameFormatter(self, buf=buf, columns=columns,
col_space=col_space, na_rep=na_rep,
formatters=formatters,
float_format=float_format,
sparsify=sparsify,
justify=justify,
index_names=index_names,
header=header, index=index,
bold_rows=bold_rows,
escape=escape,
max_rows=max_rows,
max_cols=max_cols,
show_dimensions=show_dimensions)
formatter.to_html(classes=classes)
if buf is None:
return formatter.buf.getvalue()
@Appender(fmt.docstring_to_string, indents=1)
def to_latex(self, buf=None, columns=None, col_space=None, colSpace=None,
header=True, index=True, na_rep='NaN', formatters=None,
float_format=None, sparsify=None, index_names=True,
bold_rows=True, longtable=False, escape=True):
"""
Render a DataFrame to a tabular environment table. You can splice
this into a LaTeX document. Requires \\usepackage{booktabs}.
`to_latex`-specific options:
bold_rows : boolean, default True
Make the row labels bold in the output
longtable : boolean, default False
Use a longtable environment instead of tabular. Requires adding
a \\usepackage{longtable} to your LaTeX preamble.
escape : boolean, default True
When set to False, prevents escaping of LaTeX special
characters in column names.
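Examples
--------
A minimal sketch; the table is returned as a string when `buf` is
None:
>>> df = DataFrame({'a': [1, 2]})
>>> latex = df.to_latex()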
"""
if colSpace is not None: # pragma: no cover
warnings.warn("colSpace is deprecated, use col_space",
FutureWarning)
col_space = colSpace
formatter = fmt.DataFrameFormatter(self, buf=buf, columns=columns,
col_space=col_space, na_rep=na_rep,
header=header, index=index,
formatters=formatters,
float_format=float_format,
bold_rows=bold_rows,
sparsify=sparsify,
index_names=index_names,
escape=escape)
formatter.to_latex(longtable=longtable)
if buf is None:
return formatter.buf.getvalue()
def info(self, verbose=None, buf=None, max_cols=None, memory_usage=None, null_counts=None):
"""
Concise summary of a DataFrame.
Parameters
----------
verbose : {None, True, False}, optional
Whether to print the full summary.
None follows the `display.max_info_columns` setting.
True or False overrides the `display.max_info_columns` setting.
buf : writable buffer, defaults to sys.stdout
max_cols : int, default None
Determines whether full summary or short summary is printed.
None follows the `display.max_info_columns` setting.
memory_usage : boolean, default None
Specifies whether total memory usage of the DataFrame
elements (including index) should be displayed. None follows
the `display.memory_usage` setting. True or False overrides
the `display.memory_usage` setting. Memory usage is shown in
human-readable units (base-2 representation).
null_counts : boolean, default None
Whether to show the non-null counts
If None, then only show if the frame is smaller than max_info_rows and max_info_columns.
If True, always show counts.
If False, never show counts.
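Examples
--------
Illustrative calls; ``info`` writes its summary to ``buf``
(``sys.stdout`` by default) instead of returning it:
>>> df.info(verbose=True)        # doctest: +SKIP
>>> df.info(memory_usage=False)  # doctest: +SKIP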
"""
from pandas.core.format import _put_lines
if buf is None: # pragma: no cover
buf = sys.stdout
lines = []
lines.append(str(type(self)))
lines.append(self.index.summary())
if len(self.columns) == 0:
lines.append('Empty %s' % type(self).__name__)
_put_lines(buf, lines)
return
cols = self.columns
# hack
if max_cols is None:
max_cols = get_option(
'display.max_info_columns', len(self.columns) + 1)
max_rows = get_option('display.max_info_rows', len(self) + 1)
if null_counts is None:
show_counts = ((len(self.columns) <= max_cols) and
(len(self) < max_rows))
else:
show_counts = null_counts
exceeds_info_cols = len(self.columns) > max_cols
def _verbose_repr():
lines.append('Data columns (total %d columns):' %
len(self.columns))
space = max([len(com.pprint_thing(k)) for k in self.columns]) + 4
counts = None
tmpl = "%s%s"
if show_counts:
counts = self.count()
if len(cols) != len(counts): # pragma: no cover
raise AssertionError('Columns must equal counts (%d != %d)' %
(len(cols), len(counts)))
tmpl = "%s non-null %s"
dtypes = self.dtypes
for i, col in enumerate(self.columns):
dtype = dtypes[col]
col = com.pprint_thing(col)
count = ""
if show_counts:
count = counts.iloc[i]
lines.append(_put_str(col, space) +
tmpl % (count, dtype))
def _non_verbose_repr():
lines.append(self.columns.summary(name='Columns'))
def _sizeof_fmt(num, size_qualifier):
# returns size in human readable format
for x in ['bytes', 'KB', 'MB', 'GB', 'TB']:
if num < 1024.0:
return "%3.1f%s %s" % (num, size_qualifier, x)
num /= 1024.0
return "%3.1f%s %s" % (num, size_qualifier, 'PB')
if verbose:
_verbose_repr()
elif verbose is False:  # specifically set to False, not necessarily None
_non_verbose_repr()
else:
if exceeds_info_cols:
_non_verbose_repr()
else:
_verbose_repr()
counts = self.get_dtype_counts()
dtypes = ['%s(%d)' % k for k in sorted(compat.iteritems(counts))]
lines.append('dtypes: %s' % ', '.join(dtypes))
if memory_usage is None:
memory_usage = get_option('display.memory_usage')
if memory_usage: # append memory usage of df to display
# size_qualifier is just a best effort; not guaranteed to catch all
# cases (e.g., it misses categorical data even with object
# categories)
size_qualifier = ('+' if 'object' in counts
or is_object_dtype(self.index) else '')
mem_usage = self.memory_usage(index=True).sum()
lines.append("memory usage: %s\n" %
_sizeof_fmt(mem_usage, size_qualifier))
_put_lines(buf, lines)
def memory_usage(self, index=False):
"""Memory usage of DataFrame columns.
Parameters
----------
index : bool
Specifies whether to include memory usage of the DataFrame's
index in the returned Series. If ``index=True`` (default is False),
the first entry of the returned Series is labeled ``Index``.
Returns
-------
sizes : Series
A series with column names as index and memory usage of
columns with units of bytes.
Notes
-----
Memory usage does not include memory consumed by elements that
are not components of the array.
See Also
--------
numpy.ndarray.nbytes
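Examples
--------
A brief sketch; the byte counts assume 64-bit integers:
>>> df = DataFrame({'a': [1, 2, 3]})
>>> df.memory_usage()
a    24
dtype: int64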
"""
result = Series([c.values.nbytes for col, c in self.iteritems()],
index=self.columns)
if index:
result = Series(self.index.nbytes,
index=['Index']).append(result)
return result
def transpose(self):
"""Transpose index and columns"""
return super(DataFrame, self).transpose(1, 0)
T = property(transpose)
#----------------------------------------------------------------------
# Picklability
# legacy pickle formats
def _unpickle_frame_compat(self, state): # pragma: no cover
from pandas.core.common import _unpickle_array
if len(state) == 2: # pragma: no cover
series, idx = state
columns = sorted(series)
else:
series, cols, idx = state
columns = _unpickle_array(cols)
index = _unpickle_array(idx)
self._data = self._init_dict(series, index, columns, None)
def _unpickle_matrix_compat(self, state): # pragma: no cover
from pandas.core.common import _unpickle_array
# old unpickling
(vals, idx, cols), object_state = state
index = _unpickle_array(idx)
dm = DataFrame(vals, index=index, columns=_unpickle_array(cols),
copy=False)
if object_state is not None:
ovals, _, ocols = object_state
objects = DataFrame(ovals, index=index,
columns=_unpickle_array(ocols),
copy=False)
dm = dm.join(objects)
self._data = dm._data
#----------------------------------------------------------------------
# Getting and setting elements
def get_value(self, index, col, takeable=False):
"""
Quickly retrieve single value at passed column and index
Parameters
----------
index : row label
col : column label
takeable : interpret the index/col as indexers, default False
Returns
-------
value : scalar value
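Examples
--------
A quick sketch of label-based and positional (`takeable`) access:
>>> df = DataFrame({'a': [1, 2]}, index=['x', 'y'])
>>> df.get_value('y', 'a')
2
>>> df.get_value(1, 0, takeable=True)
2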
"""
if takeable:
series = self._iget_item_cache(col)
return _maybe_box_datetimelike(series.values[index])
series = self._get_item_cache(col)
engine = self.index._engine
return engine.get_value(series.get_values(), index)
def set_value(self, index, col, value, takeable=False):
"""
Put single value at passed column and index
Parameters
----------
index : row label
col : column label
value : scalar value
takeable : interpret the index/col as indexers, default False
Returns
-------
frame : DataFrame
If label pair is contained, will be reference to calling DataFrame,
otherwise a new object
"""
try:
if takeable:
series = self._iget_item_cache(col)
return series.set_value(index, value, takeable=True)
series = self._get_item_cache(col)
engine = self.index._engine
engine.set_value(series.values, index, value)
return self
except (KeyError, TypeError):
# set using a non-recursive method & reset the cache
self.loc[index, col] = value
self._item_cache.pop(col, None)
return self
def irow(self, i, copy=False):
return self._ixs(i, axis=0)
def icol(self, i):
return self._ixs(i, axis=1)
def _ixs(self, i, axis=0):
"""
i : int, slice, or sequence of integers
axis : int
"""
# irow
if axis == 0:
"""
Notes
-----
If slice passed, the resulting data will be a view
"""
if isinstance(i, slice):
return self[i]
else:
label = self.index[i]
if isinstance(label, Index):
# a location index by definition
result = self.take(i, axis=axis)
copy = True
else:
new_values = self._data.fast_xs(i)
# if we are a copy, mark as such
copy = isinstance(new_values, np.ndarray) and new_values.base is None
result = Series(new_values, index=self.columns,
name=self.index[i], dtype=new_values.dtype)
result._set_is_copy(self, copy=copy)
return result
# icol
else:
"""
Notes
-----
If slice passed, the resulting data will be a view
"""
label = self.columns[i]
if isinstance(i, slice):
# need to return view
lab_slice = slice(label[0], label[-1])
return self.ix[:, lab_slice]
else:
label = self.columns[i]
if isinstance(label, Index):
return self.take(i, axis=1, convert=True)
# if the values returned are not the same length
# as the index (in other words, a not-found value), iget returns
# a 0-len ndarray. This is effectively catching
# a numpy error (as numpy should really raise)
values = self._data.iget(i)
if not len(values):
values = np.array([np.nan] * len(self.index), dtype=object)
result = self._constructor_sliced.from_array(
values, index=self.index,
name=label, fastpath=True)
# this is a cached value, mark it so
result._set_as_cached(label, self)
return result
def iget_value(self, i, j):
return self.iat[i, j]
def __getitem__(self, key):
# shortcut if we are an actual column
is_mi_columns = isinstance(self.columns, MultiIndex)
try:
if key in self.columns and not is_mi_columns:
return self._getitem_column(key)
except:
pass
# see if we can slice the rows
indexer = convert_to_index_sliceable(self, key)
if indexer is not None:
return self._getitem_slice(indexer)
if isinstance(key, (Series, np.ndarray, Index, list)):
# either boolean or fancy integer index
return self._getitem_array(key)
elif isinstance(key, DataFrame):
return self._getitem_frame(key)
elif is_mi_columns:
return self._getitem_multilevel(key)
else:
return self._getitem_column(key)
def _getitem_column(self, key):
""" return the actual column """
# get column
if self.columns.is_unique:
return self._get_item_cache(key)
# duplicate columns & possibly reduce dimensionality
result = self._constructor(self._data.get(key))
if result.columns.is_unique:
result = result[key]
return result
def _getitem_slice(self, key):
return self._slice(key, axis=0)
def _getitem_array(self, key):
# also raises Exception if object array with NA values
if com.is_bool_indexer(key):
# warning here just in case -- previously __setitem__ was
# reindexing but __getitem__ was not; it seems more reasonable to
# go with the __setitem__ behavior since that is more consistent
# with all other indexing behavior
if isinstance(key, Series) and not key.index.equals(self.index):
warnings.warn("Boolean Series key will be reindexed to match "
"DataFrame index.", UserWarning)
elif len(key) != len(self.index):
raise ValueError('Item wrong length %d instead of %d.' %
(len(key), len(self.index)))
# check_bool_indexer will throw exception if Series key cannot
# be reindexed to match DataFrame rows
key = check_bool_indexer(self.index, key)
indexer = key.nonzero()[0]
return self.take(indexer, axis=0, convert=False)
else:
indexer = self.ix._convert_to_indexer(key, axis=1)
return self.take(indexer, axis=1, convert=True)
def _getitem_multilevel(self, key):
loc = self.columns.get_loc(key)
if isinstance(loc, (slice, Series, np.ndarray, Index)):
new_columns = self.columns[loc]
result_columns = maybe_droplevels(new_columns, key)
if self._is_mixed_type:
result = self.reindex(columns=new_columns)
result.columns = result_columns
else:
new_values = self.values[:, loc]
result = DataFrame(new_values, index=self.index,
columns=result_columns).__finalize__(self)
if len(result.columns) == 1:
top = result.columns[0]
if ((type(top) == str and top == '') or
(type(top) == tuple and top[0] == '')):
result = result['']
if isinstance(result, Series):
result = Series(result, index=self.index, name=key)
result._set_is_copy(self)
return result
else:
return self._get_item_cache(key)
def _getitem_frame(self, key):
if key.values.dtype != np.bool_:
raise ValueError('Must pass DataFrame with boolean values only')
return self.where(key)
def query(self, expr, **kwargs):
"""Query the columns of a frame with a boolean expression.
.. versionadded:: 0.13
Parameters
----------
expr : string
The query string to evaluate. You can refer to variables
in the environment by prefixing them with an '@' character like
``@a + b``.
kwargs : dict
See the documentation for :func:`pandas.eval` for complete details
on the keyword arguments accepted by :meth:`DataFrame.query`.
Returns
-------
q : DataFrame
Notes
-----
The result of the evaluation of this expression is first passed to
:attr:`DataFrame.loc` and if that fails because of a
multidimensional key (e.g., a DataFrame) then the result will be passed
to :meth:`DataFrame.__getitem__`.
This method uses the top-level :func:`pandas.eval` function to
evaluate the passed query.
The :meth:`~pandas.DataFrame.query` method uses a slightly
modified Python syntax by default. For example, the ``&`` and ``|``
(bitwise) operators have the precedence of their boolean cousins,
:keyword:`and` and :keyword:`or`. This *is* syntactically valid Python,
however the semantics are different.
You can change the semantics of the expression by passing the keyword
argument ``parser='python'``. This enforces the same semantics as
evaluation in Python space. Likewise, you can pass ``engine='python'``
to evaluate an expression using Python itself as a backend. This is not
recommended as it is inefficient compared to using ``numexpr`` as the
engine.
The :attr:`DataFrame.index` and
:attr:`DataFrame.columns` attributes of the
:class:`~pandas.DataFrame` instance are placed in the query namespace
by default, which allows you to treat both the index and columns of the
frame as a column in the frame.
The identifier ``index`` is used for the frame index; you can also
use the name of the index to identify it in a query.
For further details and examples see the ``query`` documentation in
:ref:`indexing <indexing.query>`.
See Also
--------
pandas.eval
DataFrame.eval
Examples
--------
>>> from numpy.random import randn
>>> from pandas import DataFrame
>>> df = DataFrame(randn(10, 2), columns=list('ab'))
>>> df.query('a > b')
>>> df[df.a > df.b] # same result as the previous expression
"""
kwargs['level'] = kwargs.pop('level', 0) + 1
res = self.eval(expr, **kwargs)
try:
return self.loc[res]
except ValueError:
# when res is multi-dimensional loc raises, but this is sometimes a
# valid query
return self[res]
def eval(self, expr, **kwargs):
"""Evaluate an expression in the context of the calling DataFrame
instance.
Parameters
----------
expr : string
The expression string to evaluate.
kwargs : dict
See the documentation for :func:`~pandas.eval` for complete details
on the keyword arguments accepted by
:meth:`~pandas.DataFrame.query`.
Returns
-------
ret : ndarray, scalar, or pandas object
See Also
--------
pandas.DataFrame.query
pandas.eval
Notes
-----
For more details see the API documentation for :func:`~pandas.eval`.
For detailed examples see :ref:`enhancing performance with eval
<enhancingperf.eval>`.
Examples
--------
>>> from numpy.random import randn
>>> from pandas import DataFrame
>>> df = DataFrame(randn(10, 2), columns=list('ab'))
>>> df.eval('a + b')
>>> df.eval('c = a + b')
"""
resolvers = kwargs.pop('resolvers', None)
kwargs['level'] = kwargs.pop('level', 0) + 1
if resolvers is None:
index_resolvers = self._get_index_resolvers()
resolvers = dict(self.iteritems()), index_resolvers
kwargs['target'] = self
kwargs['resolvers'] = kwargs.get('resolvers', ()) + resolvers
return _eval(expr, **kwargs)
def select_dtypes(self, include=None, exclude=None):
"""Return a subset of a DataFrame including/excluding columns based on
their ``dtype``.
Parameters
----------
include, exclude : list-like
A list of dtypes or strings to be included/excluded. You must pass
in a non-empty sequence for at least one of these.
Raises
------
ValueError
* If both of ``include`` and ``exclude`` are empty
* If ``include`` and ``exclude`` have overlapping elements
* If any kind of string dtype is passed in.
TypeError
* If either of ``include`` or ``exclude`` is not a sequence
Returns
-------
subset : DataFrame
The subset of the frame including the dtypes in ``include`` and
excluding the dtypes in ``exclude``.
Notes
-----
* To select all *numeric* types use the numpy dtype ``numpy.number``
* To select strings you must use the ``object`` dtype, but note that
this will return *all* object dtype columns
* See the `numpy dtype hierarchy
<http://docs.scipy.org/doc/numpy/reference/arrays.scalars.html>`__
* To select Pandas categorical dtypes, use 'category'
Examples
--------
>>> df = pd.DataFrame({'a': np.random.randn(6).astype('f4'),
... 'b': [True, False] * 3,
... 'c': [1.0, 2.0] * 3})
>>> df
a b c
0 0.3962 True 1
1 0.1459 False 2
2 0.2623 True 1
3 0.0764 False 2
4 -0.9703 True 1
5 -1.2094 False 2
>>> df.select_dtypes(include=['float64'])
c
0 1
1 2
2 1
3 2
4 1
5 2
>>> df.select_dtypes(exclude=['floating'])
b
0 True
1 False
2 True
3 False
4 True
5 False
"""
include, exclude = include or (), exclude or ()
if not (com.is_list_like(include) and com.is_list_like(exclude)):
raise TypeError('include and exclude must both be non-string'
' sequences')
selection = tuple(map(frozenset, (include, exclude)))
if not any(selection):
raise ValueError('at least one of include or exclude must be '
'nonempty')
# convert the myriad valid dtype objects to a single representation
include, exclude = map(lambda x:
frozenset(map(com._get_dtype_from_object, x)),
selection)
for dtypes in (include, exclude):
com._invalidate_string_dtypes(dtypes)
# can't both include AND exclude!
if not include.isdisjoint(exclude):
raise ValueError('include and exclude overlap on %s'
% (include & exclude))
# empty include/exclude -> defaults to True
# three cases (we've already raised if both are empty)
# case 1: empty include, nonempty exclude
# we have True, True, ... True for include, same for exclude
# in the loop below we get the excluded
# and when we call '&' below we get only the excluded
# case 2: nonempty include, empty exclude
# same as case 1, but with include
# case 3: both nonempty
# the "union" of the logic of case 1 and case 2:
# we get the included and excluded, and return their logical and
include_these = Series(not bool(include), index=self.columns)
exclude_these = Series(not bool(exclude), index=self.columns)
def is_dtype_instance_mapper(column, dtype):
return column, functools.partial(issubclass, dtype.type)
for column, f in itertools.starmap(is_dtype_instance_mapper,
self.dtypes.iteritems()):
if include: # checks for the case of empty include or exclude
include_these[column] = any(map(f, include))
if exclude:
exclude_these[column] = not any(map(f, exclude))
dtype_indexer = include_these & exclude_these
return self.loc[com._get_info_slice(self, dtype_indexer)]
def _box_item_values(self, key, values):
items = self.columns[self.columns.get_loc(key)]
if values.ndim == 2:
return self._constructor(values.T, columns=items, index=self.index)
else:
return self._box_col_values(values, items)
def _box_col_values(self, values, items):
""" provide boxed values for a column """
return self._constructor_sliced.from_array(values, index=self.index,
name=items, fastpath=True)
def __setitem__(self, key, value):
# see if we can slice the rows
indexer = convert_to_index_sliceable(self, key)
if indexer is not None:
return self._setitem_slice(indexer, value)
if isinstance(key, (Series, np.ndarray, list, Index)):
self._setitem_array(key, value)
elif isinstance(key, DataFrame):
self._setitem_frame(key, value)
else:
# set column
self._set_item(key, value)
def _setitem_slice(self, key, value):
self._check_setitem_copy()
self.ix._setitem_with_indexer(key, value)
def _setitem_array(self, key, value):
# also raises Exception if object array with NA values
if com.is_bool_indexer(key):
if len(key) != len(self.index):
raise ValueError('Item wrong length %d instead of %d!' %
(len(key), len(self.index)))
key = check_bool_indexer(self.index, key)
indexer = key.nonzero()[0]
self._check_setitem_copy()
self.ix._setitem_with_indexer(indexer, value)
else:
if isinstance(value, DataFrame):
if len(value.columns) != len(key):
raise ValueError('Columns must be same length as key')
for k1, k2 in zip(key, value.columns):
self[k1] = value[k2]
else:
indexer = self.ix._convert_to_indexer(key, axis=1)
self._check_setitem_copy()
self.ix._setitem_with_indexer((slice(None), indexer), value)
def _setitem_frame(self, key, value):
# support boolean setting with DataFrame input, e.g.
# df[df > df2] = 0
if key.values.dtype != np.bool_:
raise TypeError('Must pass DataFrame with boolean values only')
self._check_inplace_setting(value)
self._check_setitem_copy()
self.where(~key, value, inplace=True)
def _ensure_valid_index(self, value):
"""
ensure that if we don't have an index, that we can create one from the
passed value
"""
if not len(self.index):
# GH5632, make sure that we are a Series convertible
if is_list_like(value):
try:
value = Series(value)
except:
pass
if not isinstance(value, Series):
raise ValueError('Cannot set a frame with no defined index '
'and a value that cannot be converted to a '
'Series')
self._data = self._data.reindex_axis(value.index.copy(), axis=1,
fill_value=np.nan)
# we are a scalar
# noop
else:
pass
def _set_item(self, key, value):
"""
Add series to DataFrame in specified column.
If series is a numpy-array (not a Series/TimeSeries), it must be the
same length as the DataFrame's index or an error will be thrown.
Series/TimeSeries will be conformed to the DataFrame's index to
ensure homogeneity.
"""
self._ensure_valid_index(value)
value = self._sanitize_column(key, value)
NDFrame._set_item(self, key, value)
# check if we are modifying a copy
# try to set first as we want an invalid
# value exception to occur first
if len(self):
self._check_setitem_copy()
def insert(self, loc, column, value, allow_duplicates=False):
"""
Insert column into DataFrame at specified location.
If `allow_duplicates` is False, raises Exception if column
is already contained in the DataFrame.
Parameters
----------
loc : int
Must have 0 <= loc <= len(columns)
column : object
value : int, Series, or array-like
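Examples
--------
A minimal sketch inserting a new column at the front:
>>> df = DataFrame({'b': [1, 2]})
>>> df.insert(0, 'a', [10, 20])
>>> list(df.columns)
['a', 'b']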
"""
self._ensure_valid_index(value)
value = self._sanitize_column(column, value)
self._data.insert(
loc, column, value, allow_duplicates=allow_duplicates)
def assign(self, **kwargs):
"""
Assign new columns to a DataFrame, returning a new object
(a copy) with all the original columns in addition to the new ones.
.. versionadded:: 0.16.0
Parameters
----------
kwargs : keyword, value pairs
keywords are the column names. If the values are
callable, they are computed on the DataFrame and
assigned to the new columns. If the values are
not callable, (e.g. a Series, scalar, or array),
they are simply assigned.
Returns
-------
df : DataFrame
A new DataFrame with the new columns in addition to
all the existing columns.
Notes
-----
Since ``kwargs`` is a dictionary, the order of your
arguments may not be preserved, and so the order of the
new columns is not well defined. Assigning multiple
columns within the same ``assign`` is possible, but you cannot
reference other columns created within the same ``assign`` call.
Examples
--------
>>> df = DataFrame({'A': range(1, 11), 'B': np.random.randn(10)})
Where the value is a callable, evaluated on `df`:
>>> df.assign(ln_A = lambda x: np.log(x.A))
A B ln_A
0 1 0.426905 0.000000
1 2 -0.780949 0.693147
2 3 -0.418711 1.098612
3 4 -0.269708 1.386294
4 5 -0.274002 1.609438
5 6 -0.500792 1.791759
6 7 1.649697 1.945910
7 8 -1.495604 2.079442
8 9 0.549296 2.197225
9 10 -0.758542 2.302585
Where the value already exists and is inserted:
>>> newcol = np.log(df['A'])
>>> df.assign(ln_A=newcol)
A B ln_A
0 1 0.426905 0.000000
1 2 -0.780949 0.693147
2 3 -0.418711 1.098612
3 4 -0.269708 1.386294
4 5 -0.274002 1.609438
5 6 -0.500792 1.791759
6 7 1.649697 1.945910
7 8 -1.495604 2.079442
8 9 0.549296 2.197225
9 10 -0.758542 2.302585
"""
data = self.copy()
# do all calculations first...
results = {}
for k, v in kwargs.items():
if callable(v):
results[k] = v(data)
else:
results[k] = v
# ... and then assign
for k, v in results.items():
data[k] = v
return data
def _sanitize_column(self, key, value):
# Need to make sure new columns (which go into the BlockManager as new
# blocks) are always copied
def reindexer(value):
# reindex if necessary
if value.index.equals(self.index) or not len(self.index):
value = value.values.copy()
else:
# GH 4107
try:
value = value.reindex(self.index).values
except Exception as e:
# duplicate axis
if not value.index.is_unique:
raise e
# other
raise TypeError('incompatible index of inserted column '
'with frame index')
return value
if isinstance(value, Series):
value = reindexer(value)
elif isinstance(value, DataFrame):
# align right-hand-side columns if self.columns
# is multi-index and self[key] is a sub-frame
if isinstance(self.columns, MultiIndex) and key in self.columns:
loc = self.columns.get_loc(key)
if isinstance(loc, (slice, Series, np.ndarray, Index)):
cols = maybe_droplevels(self.columns[loc], key)
if len(cols) and not cols.equals(value.columns):
value = value.reindex_axis(cols, axis=1)
# now align rows
value = reindexer(value).T
elif isinstance(value, Categorical):
value = value.copy()
elif (isinstance(value, Index) or is_sequence(value)):
from pandas.core.series import _sanitize_index
# turn me into an ndarray
value = _sanitize_index(value, self.index, copy=False)
if not isinstance(value, (np.ndarray, Index)):
if isinstance(value, list) and len(value) > 0:
value = com._possibly_convert_platform(value)
else:
value = com._asarray_tuplesafe(value)
elif value.ndim == 2:
value = value.copy().T
else:
value = value.copy()
# possibly infer to datetimelike
if is_object_dtype(value.dtype):
value = _possibly_infer_to_datetimelike(value.ravel()).reshape(value.shape)
else:
# upcast the scalar
dtype, value = _infer_dtype_from_scalar(value)
value = np.repeat(value, len(self.index)).astype(dtype)
value = com._possibly_cast_to_datetime(value, dtype)
# return unconsolidatables directly
if isinstance(value, (Categorical, SparseArray)):
return value
# broadcast across multiple columns if necessary
if key in self.columns and value.ndim == 1:
if not self.columns.is_unique or isinstance(self.columns,
MultiIndex):
existing_piece = self[key]
if isinstance(existing_piece, DataFrame):
value = np.tile(value, (len(existing_piece.columns), 1))
return np.atleast_2d(np.asarray(value))
@property
def _series(self):
result = {}
for idx, item in enumerate(self.columns):
result[item] = Series(self._data.iget(idx), index=self.index,
name=item)
return result
def lookup(self, row_labels, col_labels):
"""Label-based "fancy indexing" function for DataFrame.
Given equal-length arrays of row and column labels, return an
array of the values corresponding to each (row, col) pair.
Parameters
----------
row_labels : sequence
The row labels to use for lookup
col_labels : sequence
The column labels to use for lookup
Notes
-----
Akin to::
result = []
for row, col in zip(row_labels, col_labels):
result.append(df.get_value(row, col))
Returns
-------
values : ndarray
The found values
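Examples
--------
A small illustrative sketch (values shown are representative):
>>> df = DataFrame({'A': [1, 2, 3], 'B': [4, 5, 6]},
...                index=['a', 'b', 'c'])
>>> df.lookup(['a', 'c'], ['B', 'A'])
array([4, 3])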
"""
n = len(row_labels)
if n != len(col_labels):
raise ValueError('Row labels must have same size as column labels')
thresh = 1000
if not self._is_mixed_type or n > thresh:
values = self.values
ridx = self.index.get_indexer(row_labels)
cidx = self.columns.get_indexer(col_labels)
if (ridx == -1).any():
raise KeyError('One or more row labels was not found')
if (cidx == -1).any():
raise KeyError('One or more column labels was not found')
flat_index = ridx * len(self.columns) + cidx
result = values.flat[flat_index]
else:
result = np.empty(n, dtype='O')
for i, (r, c) in enumerate(zip(row_labels, col_labels)):
result[i] = self.get_value(r, c)
if is_object_dtype(result):
result = lib.maybe_convert_objects(result)
return result
#----------------------------------------------------------------------
# Reindexing and alignment
def _reindex_axes(self, axes, level, limit, method, fill_value, copy):
frame = self
columns = axes['columns']
if columns is not None:
frame = frame._reindex_columns(columns, copy, level, fill_value,
limit)
index = axes['index']
if index is not None:
frame = frame._reindex_index(index, method, copy, level,
fill_value, limit)
return frame
def _reindex_index(self, new_index, method, copy, level, fill_value=NA,
limit=None):
new_index, indexer = self.index.reindex(new_index, method, level,
limit=limit)
return self._reindex_with_indexers({0: [new_index, indexer]},
copy=copy, fill_value=fill_value,
allow_dups=False)
def _reindex_columns(self, new_columns, copy, level, fill_value=NA,
limit=None):
new_columns, indexer = self.columns.reindex(new_columns, level=level,
limit=limit)
return self._reindex_with_indexers({1: [new_columns, indexer]},
copy=copy, fill_value=fill_value,
allow_dups=False)
def _reindex_multi(self, axes, copy, fill_value):
""" we are guaranteed non-Nones in the axes! """
new_index, row_indexer = self.index.reindex(axes['index'])
new_columns, col_indexer = self.columns.reindex(axes['columns'])
if row_indexer is not None and col_indexer is not None:
indexer = row_indexer, col_indexer
new_values = com.take_2d_multi(self.values, indexer,
fill_value=fill_value)
return self._constructor(new_values, index=new_index,
columns=new_columns)
else:
return self._reindex_with_indexers({0: [new_index, row_indexer],
1: [new_columns, col_indexer]},
copy=copy,
fill_value=fill_value)
@Appender(_shared_docs['reindex'] % _shared_doc_kwargs)
def reindex(self, index=None, columns=None, **kwargs):
return super(DataFrame, self).reindex(index=index, columns=columns,
**kwargs)
@Appender(_shared_docs['reindex_axis'] % _shared_doc_kwargs)
def reindex_axis(self, labels, axis=0, method=None, level=None, copy=True,
limit=None, fill_value=np.nan):
return super(DataFrame, self).reindex_axis(labels=labels, axis=axis,
method=method, level=level,
copy=copy, limit=limit,
fill_value=fill_value)
@Appender(_shared_docs['rename'] % _shared_doc_kwargs)
def rename(self, index=None, columns=None, **kwargs):
return super(DataFrame, self).rename(index=index, columns=columns,
**kwargs)
def set_index(self, keys, drop=True, append=False, inplace=False,
verify_integrity=False):
"""
Set the DataFrame index (row labels) using one or more existing
columns. By default yields a new object.
Parameters
----------
keys : column label or list of column labels / arrays
drop : boolean, default True
Delete columns to be used as the new index
append : boolean, default False
Whether to append columns to existing index
inplace : boolean, default False
Modify the DataFrame in place (do not create a new object)
verify_integrity : boolean, default False
Check the new index for duplicates. Otherwise defer the check until
necessary. Setting to False will improve the performance of this
method
Examples
--------
>>> indexed_df = df.set_index(['A', 'B'])
>>> indexed_df2 = df.set_index(['A', [0, 1, 2, 0, 1, 2]])
>>> indexed_df3 = df.set_index([[0, 1, 2, 0, 1, 2]])
Returns
-------
dataframe : DataFrame
"""
if not isinstance(keys, list):
keys = [keys]
if inplace:
frame = self
else:
frame = self.copy()
arrays = []
names = []
if append:
names = [x for x in self.index.names]
if isinstance(self.index, MultiIndex):
for i in range(self.index.nlevels):
arrays.append(self.index.get_level_values(i))
else:
arrays.append(self.index)
to_remove = []
for col in keys:
if isinstance(col, MultiIndex):
# append all but the last column so we don't have to modify
# the end of this loop
for n in range(col.nlevels - 1):
arrays.append(col.get_level_values(n))
level = col.get_level_values(col.nlevels - 1)
names.extend(col.names)
elif isinstance(col, Series):
level = col.values
names.append(col.name)
elif isinstance(col, Index):
level = col
names.append(col.name)
elif isinstance(col, (list, np.ndarray, Index)):
level = col
names.append(None)
else:
level = frame[col].values
names.append(col)
if drop:
to_remove.append(col)
arrays.append(level)
index = MultiIndex.from_arrays(arrays, names=names)
if verify_integrity and not index.is_unique:
duplicates = index.get_duplicates()
raise ValueError('Index has duplicate keys: %s' % duplicates)
for c in to_remove:
del frame[c]
# clear up memory usage
index._cleanup()
frame.index = index
if not inplace:
return frame
def reset_index(self, level=None, drop=False, inplace=False, col_level=0,
col_fill=''):
"""
For DataFrame with multi-level index, return new DataFrame with
labeling information in the columns under the index names, defaulting
to 'level_0', 'level_1', etc. if any are None. For a standard index,
the index name will be used (if set), otherwise a default 'index' or
'level_0' (if 'index' is already taken) will be used.
Parameters
----------
level : int, str, tuple, or list, default None
Only remove the given levels from the index. Removes all levels by
default
drop : boolean, default False
Do not try to insert index into dataframe columns. This resets
the index to the default integer index.
inplace : boolean, default False
Modify the DataFrame in place (do not create a new object)
col_level : int or str, default 0
If the columns have multiple levels, determines which level the
labels are inserted into. By default it is inserted into the first
level.
col_fill : object, default ''
If the columns have multiple levels, determines how the other
levels are named. If None then the index name is repeated.
Returns
-------
resetted : DataFrame
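Examples
--------
A minimal sketch of typical usage:
>>> df = DataFrame({'A': [1, 2]}, index=pd.Index(['x', 'y'], name='idx'))
>>> df.reset_index()           # 'idx' becomes an ordinary column
>>> df.reset_index(drop=True)  # discard the index instead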
"""
if inplace:
new_obj = self
else:
new_obj = self.copy()
def _maybe_casted_values(index, labels=None):
if isinstance(index, PeriodIndex):
    values = index.asobject.values
else:
    values = index.values
    if values.dtype == np.object_:
        values = lib.maybe_convert_objects(values)
# if we have the labels, extract the values with a mask
if labels is not None:
    mask = labels == -1
    values = values.take(labels)
    if mask.any():
        values, changed = com._maybe_upcast_putmask(values, mask,
                                                    np.nan)
return values
new_index = np.arange(len(new_obj),dtype='int64')
if isinstance(self.index, MultiIndex):
if level is not None:
if not isinstance(level, (tuple, list)):
level = [level]
level = [self.index._get_level_number(lev) for lev in level]
if len(level) < len(self.index.levels):
new_index = self.index.droplevel(level)
if not drop:
names = self.index.names
zipped = lzip(self.index.levels, self.index.labels)
multi_col = isinstance(self.columns, MultiIndex)
for i, (lev, lab) in reversed(list(enumerate(zipped))):
col_name = names[i]
if col_name is None:
col_name = 'level_%d' % i
if multi_col:
if col_fill is None:
col_name = tuple([col_name] *
self.columns.nlevels)
else:
name_lst = [col_fill] * self.columns.nlevels
lev_num = self.columns._get_level_number(col_level)
name_lst[lev_num] = col_name
col_name = tuple(name_lst)
# to ndarray and maybe infer different dtype
level_values = _maybe_casted_values(lev, lab)
if level is None or i in level:
new_obj.insert(0, col_name, level_values)
elif not drop:
name = self.index.name
if name is None or name == 'index':
name = 'index' if 'index' not in self else 'level_0'
if isinstance(self.columns, MultiIndex):
if col_fill is None:
name = tuple([name] * self.columns.nlevels)
else:
name_lst = [col_fill] * self.columns.nlevels
lev_num = self.columns._get_level_number(col_level)
name_lst[lev_num] = name
name = tuple(name_lst)
values = _maybe_casted_values(self.index)
new_obj.insert(0, name, values)
new_obj.index = new_index
if not inplace:
return new_obj
#----------------------------------------------------------------------
# Reindex-based selection methods
def dropna(self, axis=0, how='any', thresh=None, subset=None,
inplace=False):
"""
Return object with labels on given axis omitted where any or all of
the data are missing, depending on ``how``
Parameters
----------
axis : {0, 1}, or tuple/list thereof
Pass tuple or list to drop on multiple axes
how : {'any', 'all'}
* any : if any NA values are present, drop that label
* all : if all values are NA, drop that label
thresh : int, default None
int value : require that many non-NA values
subset : array-like
Labels along other axis to consider, e.g. if you are dropping rows
these would be a list of columns to include
inplace : boolean, default False
If True, do operation inplace and return None.
Returns
-------
dropped : DataFrame
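Examples
--------
Illustrative calls (assuming ``df`` has some missing values):
>>> df = DataFrame({'A': [1, np.nan, 3], 'B': [4, 5, np.nan]})
>>> df.dropna()              # drop rows with any NA value
>>> df.dropna(how='all')     # drop rows that are entirely NA
>>> df.dropna(thresh=2)      # keep rows with at least 2 non-NA values
>>> df.dropna(subset=['A'])  # only consider column 'A' when dropping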
"""
if isinstance(axis, (tuple, list)):
result = self
for ax in axis:
result = result.dropna(how=how, thresh=thresh,
subset=subset, axis=ax)
else:
axis = self._get_axis_number(axis)
agg_axis = 1 - axis
agg_obj = self
if subset is not None:
ax = self._get_axis(agg_axis)
indices = ax.get_indexer_for(subset)
check = indices == -1
if check.any():
raise KeyError(list(np.compress(check,subset)))
agg_obj = self.take(indices,axis=agg_axis)
count = agg_obj.count(axis=agg_axis)
if thresh is not None:
mask = count >= thresh
elif how == 'any':
mask = count == len(agg_obj._get_axis(agg_axis))
elif how == 'all':
mask = count > 0
else:
if how is not None:
raise ValueError('invalid how option: %s' % how)
else:
raise TypeError('must specify how or thresh')
result = self.take(mask.nonzero()[0], axis=axis, convert=False)
if inplace:
self._update_inplace(result)
else:
return result
@deprecate_kwarg(old_arg_name='cols', new_arg_name='subset')
def drop_duplicates(self, subset=None, take_last=False, inplace=False):
"""
Return DataFrame with duplicate rows removed, optionally only
considering certain columns
Parameters
----------
subset : column label or sequence of labels, optional
Only consider certain columns for identifying duplicates, by
default use all of the columns
take_last : boolean, default False
    Keep the last observed row in each set of duplicate rows; by
    default the first observed row is kept
inplace : boolean, default False
Whether to drop duplicates in place or to return a copy
cols : kwargs only argument of subset [deprecated]
Returns
-------
deduplicated : DataFrame
"""
duplicated = self.duplicated(subset, take_last=take_last)
if inplace:
inds, = (-duplicated).nonzero()
new_data = self._data.take(inds)
self._update_inplace(new_data)
else:
return self[-duplicated]
@deprecate_kwarg(old_arg_name='cols', new_arg_name='subset')
def duplicated(self, subset=None, take_last=False):
"""
Return boolean Series denoting duplicate rows, optionally only
considering certain columns
Parameters
----------
subset : column label or sequence of labels, optional
Only consider certain columns for identifying duplicates, by
default use all of the columns
take_last : boolean, default False
For a set of distinct duplicate rows, flag all but the last row as
duplicated. Default is for all but the first row to be flagged
cols : kwargs only argument of subset [deprecated]
Returns
-------
duplicated : Series
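Examples
--------
A small illustrative example (output is representative):
>>> df = DataFrame({'A': [1, 1, 2], 'B': ['x', 'x', 'y']})
>>> df.duplicated()
0    False
1     True
2    False
dtype: bool
>>> df.duplicated(take_last=True)
0     True
1    False
2    False
dtype: bool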
"""
from pandas.core.groupby import get_group_index
from pandas.core.algorithms import factorize
from pandas.hashtable import duplicated_int64, _SIZE_HINT_LIMIT
def f(vals):
labels, shape = factorize(vals, size_hint=min(len(self), _SIZE_HINT_LIMIT))
return labels.astype('i8',copy=False), len(shape)
if subset is None:
subset = self.columns
elif not np.iterable(subset) or \
isinstance(subset, compat.string_types) or \
isinstance(subset, tuple) and subset in self.columns:
subset = subset,
vals = (self[col].values for col in subset)
labels, shape = map(list, zip(*map(f, vals)))
ids = get_group_index(labels, shape, sort=False, xnull=False)
return Series(duplicated_int64(ids, take_last), index=self.index)
#----------------------------------------------------------------------
# Sorting
def sort(self, columns=None, axis=0, ascending=True,
inplace=False, kind='quicksort', na_position='last'):
"""
Sort DataFrame either by labels (along either axis) or by the values in
column(s)
Parameters
----------
columns : object
Column name(s) in frame. Accepts a column name or a list
for a nested sort. A tuple will be interpreted as the
levels of a multi-index.
ascending : boolean or list, default True
Sort ascending vs. descending. Specify list for multiple sort
orders
axis : {0, 1}
Sort index/rows versus columns
inplace : boolean, default False
Sort the DataFrame without creating a new instance
kind : {'quicksort', 'mergesort', 'heapsort'}, optional
This option is only applied when sorting on a single column or label.
na_position : {'first', 'last'} (optional, default='last')
'first' puts NaNs at the beginning
'last' puts NaNs at the end
Examples
--------
>>> result = df.sort(['A', 'B'], ascending=[1, 0])
Returns
-------
sorted : DataFrame
"""
return self.sort_index(by=columns, axis=axis, ascending=ascending,
inplace=inplace, kind=kind, na_position=na_position)
def sort_index(self, axis=0, by=None, ascending=True, inplace=False,
kind='quicksort', na_position='last'):
"""
Sort DataFrame either by labels (along either axis) or by the values in
a column
Parameters
----------
axis : {0, 1}
Sort index/rows versus columns
by : object
Column name(s) in frame. Accepts a column name or a list
for a nested sort. A tuple will be interpreted as the
levels of a multi-index.
ascending : boolean or list, default True
Sort ascending vs. descending. Specify list for multiple sort
orders
inplace : boolean, default False
Sort the DataFrame without creating a new instance
na_position : {'first', 'last'} (optional, default='last')
'first' puts NaNs at the beginning
'last' puts NaNs at the end
kind : {'quicksort', 'mergesort', 'heapsort'}, optional
This option is only applied when sorting on a single column or label.
Examples
--------
>>> result = df.sort_index(by=['A', 'B'], ascending=[True, False])
Returns
-------
sorted : DataFrame
"""
from pandas.core.groupby import _lexsort_indexer, _nargsort
axis = self._get_axis_number(axis)
if axis not in [0, 1]: # pragma: no cover
raise AssertionError('Axis must be 0 or 1, got %s' % str(axis))
labels = self._get_axis(axis)
if by is not None:
if axis != 0:
raise ValueError('When sorting by column, axis must be 0 '
'(rows)')
if not isinstance(by, list):
by = [by]
if com.is_sequence(ascending) and len(by) != len(ascending):
raise ValueError('Length of ascending (%d) != length of by'
' (%d)' % (len(ascending), len(by)))
if len(by) > 1:
def trans(v):
if com.needs_i8_conversion(v):
return v.view('i8')
return v
keys = []
for x in by:
k = self[x].values
if k.ndim == 2:
raise ValueError('Cannot sort by duplicate column %s' % str(x))
keys.append(trans(k))
indexer = _lexsort_indexer(keys, orders=ascending,
na_position=na_position)
indexer = com._ensure_platform_int(indexer)
else:
by = by[0]
k = self[by].values
if k.ndim == 2:
# try to be helpful
if isinstance(self.columns, MultiIndex):
raise ValueError('Cannot sort by column %s in a multi-index'
' you need to explicitly provide all the levels'
% str(by))
raise ValueError('Cannot sort by duplicate column %s'
% str(by))
if isinstance(ascending, (tuple, list)):
ascending = ascending[0]
indexer = _nargsort(k, kind=kind, ascending=ascending,
na_position=na_position)
elif isinstance(labels, MultiIndex):
# make sure that the axis is lexsorted to start
# if not we need to reconstruct to get the correct indexer
if not labels.is_lexsorted():
labels = MultiIndex.from_tuples(labels.values)
indexer = _lexsort_indexer(labels.labels, orders=ascending,
na_position=na_position)
indexer = com._ensure_platform_int(indexer)
else:
indexer = _nargsort(labels, kind=kind, ascending=ascending,
na_position=na_position)
bm_axis = self._get_block_manager_axis(axis)
new_data = self._data.take(indexer, axis=bm_axis,
convert=False, verify=False)
if inplace:
return self._update_inplace(new_data)
else:
return self._constructor(new_data).__finalize__(self)
def sortlevel(self, level=0, axis=0, ascending=True,
inplace=False, sort_remaining=True):
"""
Sort multilevel index by chosen axis and primary level. Data will be
lexicographically sorted by the chosen level followed by the other
levels (in order)
Parameters
----------
level : int
axis : {0, 1}
ascending : boolean, default True
inplace : boolean, default False
Sort the DataFrame without creating a new instance
sort_remaining : boolean, default True
Sort by the other levels too.
Returns
-------
sorted : DataFrame
"""
axis = self._get_axis_number(axis)
the_axis = self._get_axis(axis)
if not isinstance(the_axis, MultiIndex):
raise TypeError('can only sort by level with a hierarchical index')
new_axis, indexer = the_axis.sortlevel(level, ascending=ascending,
sort_remaining=sort_remaining)
if self._is_mixed_type and not inplace:
ax = 'index' if axis == 0 else 'columns'
if new_axis.is_unique:
return self.reindex(**{ax: new_axis})
else:
return self.take(indexer, axis=axis, convert=False)
bm_axis = self._get_block_manager_axis(axis)
new_data = self._data.take(indexer, axis=bm_axis,
convert=False, verify=False)
if inplace:
return self._update_inplace(new_data)
else:
return self._constructor(new_data).__finalize__(self)
def swaplevel(self, i, j, axis=0):
"""
Swap levels i and j in a MultiIndex on a particular axis
Parameters
----------
i, j : int, string (can be mixed)
Level of index to be swapped. Can pass level name as string.
Returns
-------
swapped : type of caller (new object)
"""
result = self.copy()
axis = self._get_axis_number(axis)
if axis == 0:
result.index = result.index.swaplevel(i, j)
else:
result.columns = result.columns.swaplevel(i, j)
return result
def reorder_levels(self, order, axis=0):
"""
Rearrange index levels using input order.
May not drop or duplicate levels
Parameters
----------
order : list of int or list of str
List representing new level order. Reference level by number
(position) or by key (label).
axis : int
Where to reorder levels.
Returns
-------
type of caller (new object)
"""
axis = self._get_axis_number(axis)
if not isinstance(self._get_axis(axis),
MultiIndex): # pragma: no cover
raise TypeError('Can only reorder levels on a hierarchical axis.')
result = self.copy()
if axis == 0:
result.index = result.index.reorder_levels(order)
else:
result.columns = result.columns.reorder_levels(order)
return result
#----------------------------------------------------------------------
# Arithmetic / combination related
def _combine_frame(self, other, func, fill_value=None, level=None):
this, other = self.align(other, join='outer', level=level, copy=False)
new_index, new_columns = this.index, this.columns
def _arith_op(left, right):
if fill_value is not None:
left_mask = isnull(left)
right_mask = isnull(right)
left = left.copy()
right = right.copy()
# one but not both
mask = left_mask ^ right_mask
left[left_mask & mask] = fill_value
right[right_mask & mask] = fill_value
return func(left, right)
if this._is_mixed_type or other._is_mixed_type:
# unique
if this.columns.is_unique:
def f(col):
r = _arith_op(this[col].values, other[col].values)
return self._constructor_sliced(r, index=new_index,
dtype=r.dtype)
result = dict([(col, f(col)) for col in this])
# non-unique
else:
def f(i):
r = _arith_op(this.iloc[:, i].values,
other.iloc[:, i].values)
return self._constructor_sliced(r, index=new_index,
dtype=r.dtype)
result = dict([
(i, f(i)) for i, col in enumerate(this.columns)
])
result = self._constructor(result, index=new_index, copy=False)
result.columns = new_columns
return result
else:
result = _arith_op(this.values, other.values)
return self._constructor(result, index=new_index,
columns=new_columns, copy=False)
def _combine_series(self, other, func, fill_value=None, axis=None,
level=None):
if axis is not None:
axis = self._get_axis_name(axis)
if axis == 'index':
return self._combine_match_index(other, func, level=level, fill_value=fill_value)
else:
return self._combine_match_columns(other, func, level=level, fill_value=fill_value)
return self._combine_series_infer(other, func, level=level, fill_value=fill_value)
def _combine_series_infer(self, other, func, level=None, fill_value=None):
if len(other) == 0:
return self * NA
if len(self) == 0:
# Ambiguous case, use _series so works with DataFrame
return self._constructor(data=self._series, index=self.index,
columns=self.columns)
# teeny hack because one does DataFrame + TimeSeries all the time
if self.index.is_all_dates and other.index.is_all_dates:
warnings.warn(("TimeSeries broadcasting along DataFrame index "
"by default is deprecated. Please use "
"DataFrame.<op> to explicitly broadcast arithmetic "
"operations along the index"),
FutureWarning)
return self._combine_match_index(other, func, level=level, fill_value=fill_value)
else:
return self._combine_match_columns(other, func, level=level, fill_value=fill_value)
def _combine_match_index(self, other, func, level=None, fill_value=None):
left, right = self.align(other, join='outer', axis=0, level=level, copy=False)
if fill_value is not None:
raise NotImplementedError("fill_value %r not supported." %
fill_value)
return self._constructor(func(left.values.T, right.values).T,
index=left.index,
columns=self.columns, copy=False)
def _combine_match_columns(self, other, func, level=None, fill_value=None):
left, right = self.align(other, join='outer', axis=1, level=level, copy=False)
if fill_value is not None:
raise NotImplementedError("fill_value %r not supported" %
fill_value)
new_data = left._data.eval(
func=func, other=right, axes=[left.columns, self.index])
return self._constructor(new_data)
def _combine_const(self, other, func, raise_on_error=True):
if self.empty:
return self
new_data = self._data.eval(func=func, other=other, raise_on_error=raise_on_error)
return self._constructor(new_data)
def _compare_frame_evaluate(self, other, func, str_rep):
# unique
if self.columns.is_unique:
def _compare(a, b):
return dict([(col, func(a[col], b[col])) for col in a.columns])
new_data = expressions.evaluate(_compare, str_rep, self, other)
return self._constructor(data=new_data, index=self.index,
columns=self.columns, copy=False)
# non-unique
else:
def _compare(a, b):
return dict([(i, func(a.iloc[:, i], b.iloc[:, i]))
for i, col in enumerate(a.columns)])
new_data = expressions.evaluate(_compare, str_rep, self, other)
result = self._constructor(data=new_data, index=self.index,
copy=False)
result.columns = self.columns
return result
def _compare_frame(self, other, func, str_rep):
if not self._indexed_same(other):
raise ValueError('Can only compare identically-labeled '
'DataFrame objects')
return self._compare_frame_evaluate(other, func, str_rep)
def _flex_compare_frame(self, other, func, str_rep, level):
if not self._indexed_same(other):
self, other = self.align(other, 'outer', level=level, copy=False)
return self._compare_frame_evaluate(other, func, str_rep)
def combine(self, other, func, fill_value=None, overwrite=True):
"""
Add two DataFrame objects and do not propagate NaN values, so if for a
(column, time) one frame is missing a value, it will default to the
other frame's value (which might be NaN as well)
Parameters
----------
other : DataFrame
func : function
fill_value : scalar value
overwrite : boolean, default True
If True then overwrite values for common keys in the calling frame
Returns
-------
result : DataFrame
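Examples
--------
``func`` is called with a pair of aligned Series, one column at a
time; a sketch that keeps the column with the smaller sum:
>>> df1 = DataFrame({'A': [0, 0], 'B': [4, 4]})
>>> df2 = DataFrame({'A': [1, 1], 'B': [3, 3]})
>>> df1.combine(df2, lambda s1, s2: s1 if s1.sum() < s2.sum() else s2)
   A  B
0  0  3
1  0  3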
"""
other_idxlen = len(other.index) # save for compare
this, other = self.align(other, copy=False)
new_index = this.index
if other.empty and len(new_index) == len(self.index):
return self.copy()
if self.empty and len(other) == other_idxlen:
return other.copy()
# sorts if possible
new_columns = this.columns.union(other.columns)
do_fill = fill_value is not None
result = {}
for col in new_columns:
series = this[col]
otherSeries = other[col]
this_dtype = series.dtype
other_dtype = otherSeries.dtype
this_mask = isnull(series)
other_mask = isnull(otherSeries)
# don't overwrite columns unnecessarily
# DO propagate if this column is not in the intersection
if not overwrite and other_mask.all():
result[col] = this[col].copy()
continue
if do_fill:
series = series.copy()
otherSeries = otherSeries.copy()
series[this_mask] = fill_value
otherSeries[other_mask] = fill_value
# if we have different dtypes, possibly promote
new_dtype = this_dtype
if this_dtype != other_dtype:
new_dtype = com._lcd_dtypes(this_dtype, other_dtype)
series = series.astype(new_dtype)
otherSeries = otherSeries.astype(new_dtype)
# see if we need to be represented as i8 (datetimelike)
# try to keep us at this dtype
needs_i8_conversion = com.needs_i8_conversion(new_dtype)
if needs_i8_conversion:
this_dtype = new_dtype
arr = func(series, otherSeries, True)
else:
arr = func(series, otherSeries)
if do_fill:
arr = com.ensure_float(arr)
arr[this_mask & other_mask] = NA
# try to downcast back to the original dtype
if needs_i8_conversion:
arr = com._possibly_cast_to_datetime(arr, this_dtype)
else:
arr = com._possibly_downcast_to_dtype(arr, this_dtype)
result[col] = arr
# convert_objects just in case
return self._constructor(result,
index=new_index,
columns=new_columns).convert_objects(
convert_dates=True,
copy=False)
def combine_first(self, other):
"""
Combine two DataFrame objects and default to non-null values in frame
calling the method. The result's index and columns will be the union
of the respective indexes and columns
Parameters
----------
other : DataFrame
Examples
--------
a's values prioritized, use values from b to fill holes:
>>> a.combine_first(b)
Returns
-------
combined : DataFrame
"""
def combiner(x, y, needs_i8_conversion=False):
x_values = x.values if hasattr(x, 'values') else x
y_values = y.values if hasattr(y, 'values') else y
if needs_i8_conversion:
mask = isnull(x)
x_values = x_values.view('i8')
y_values = y_values.view('i8')
else:
mask = isnull(x_values)
return expressions.where(mask, y_values, x_values,
raise_on_error=True)
return self.combine(other, combiner, overwrite=False)
def update(self, other, join='left', overwrite=True, filter_func=None,
raise_conflict=False):
"""
Modify DataFrame in place using non-NA values from passed
DataFrame. Aligns on indices
Parameters
----------
other : DataFrame, or object coercible into a DataFrame
join : {'left'}, default 'left'
overwrite : boolean, default True
If True then overwrite values for common keys in the calling frame
filter_func : callable(1d-array) -> 1d-array<boolean>, default None
Can choose to replace values other than NA. Return True for values
that should be updated
raise_conflict : boolean
If True, will raise an error if the DataFrame and other both
contain data in the same place.
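Examples
--------
A minimal sketch; NA values in ``other`` never overwrite:
>>> df = DataFrame({'A': [1, 2, 3], 'B': [400, 500, 600]})
>>> other = DataFrame({'B': [4, np.nan, 6]})
>>> df.update(other)  # B is now [4, 500, 6]; df.A is unchanged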
"""
# TODO: Support other joins
if join != 'left': # pragma: no cover
raise NotImplementedError("Only left join is supported")
if not isinstance(other, DataFrame):
other = DataFrame(other)
other = other.reindex_like(self)
for col in self.columns:
this = self[col].values
that = other[col].values
if filter_func is not None:
mask = ~filter_func(this) | isnull(that)
else:
if raise_conflict:
mask_this = notnull(that)
mask_that = notnull(this)
if any(mask_this & mask_that):
raise ValueError("Data overlaps.")
if overwrite:
mask = isnull(that)
# don't overwrite columns unnecessarily
if mask.all():
continue
else:
mask = notnull(this)
self[col] = expressions.where(
mask, this, that, raise_on_error=True)
#----------------------------------------------------------------------
# Misc methods
def first_valid_index(self):
"""
Return label for first non-NA/null value
"""
return self.index[self.count(1) > 0][0]
def last_valid_index(self):
"""
Return label for last non-NA/null value
"""
return self.index[self.count(1) > 0][-1]
#----------------------------------------------------------------------
# Data reshaping
def pivot(self, index=None, columns=None, values=None):
"""
Reshape data (produce a "pivot" table) based on column values. Uses
unique values from index / columns to form axes and return either
DataFrame or Panel, depending on whether you request a single value
column (DataFrame) or all columns (Panel)
Parameters
----------
index : string or object
Column name to use to make new frame's index
columns : string or object
Column name to use to make new frame's columns
values : string or object, optional
Column name to use for populating new frame's values
Notes
-----
For finer-tuned control, see hierarchical indexing documentation along
with the related stack/unstack methods
Examples
--------
>>> df
foo bar baz
0 one A 1.
1 one B 2.
2 one C 3.
3 two A 4.
4 two B 5.
5 two C 6.
>>> df.pivot('foo', 'bar', 'baz')
A B C
one 1 2 3
two 4 5 6
>>> df.pivot('foo', 'bar')['baz']
A B C
one 1 2 3
two 4 5 6
Returns
-------
pivoted : DataFrame
If no values column specified, will have hierarchically indexed
columns
"""
from pandas.core.reshape import pivot
return pivot(self, index=index, columns=columns, values=values)
def stack(self, level=-1, dropna=True):
"""
Pivot a level of the (possibly hierarchical) column labels, returning a
DataFrame (or Series in the case of an object with a single level of
column labels) having a hierarchical index with a new inner-most level
of row labels.
The level involved will automatically get sorted.
Parameters
----------
level : int, string, or list of these, default last level
Level(s) to stack, can pass level name
dropna : boolean, default True
Whether to drop rows in the resulting Frame/Series with no valid
values
Examples
--------
>>> s
a b
one 1. 2.
two 3. 4.
>>> s.stack()
one a 1
b 2
two a 3
b 4
Returns
-------
stacked : DataFrame or Series
"""
from pandas.core.reshape import stack, stack_multiple
if isinstance(level, (tuple, list)):
return stack_multiple(self, level, dropna=dropna)
else:
return stack(self, level, dropna=dropna)
def unstack(self, level=-1):
"""
Pivot a level of the (necessarily hierarchical) index labels, returning
a DataFrame having a new level of column labels whose inner-most level
consists of the pivoted index labels. If the index is not a MultiIndex,
the output will be a Series (the analogue of stack when the columns are
not a MultiIndex).
The level involved will automatically get sorted.
Parameters
----------
level : int, string, or list of these, default -1 (last level)
Level(s) of index to unstack, can pass level name
See also
--------
DataFrame.pivot : Pivot a table based on column values.
DataFrame.stack : Pivot a level of the column labels (inverse operation
from `unstack`).
Examples
--------
>>> index = pd.MultiIndex.from_tuples([('one', 'a'), ('one', 'b'),
... ('two', 'a'), ('two', 'b')])
>>> s = pd.Series(np.arange(1.0, 5.0), index=index)
>>> s
one a 1
b 2
two a 3
b 4
dtype: float64
>>> s.unstack(level=-1)
a b
one 1 2
two 3 4
>>> s.unstack(level=0)
one two
a 1 3
b 2 4
>>> df = s.unstack(level=0)
>>> df.unstack()
one a 1.
b 3.
two a 2.
b 4.
Returns
-------
unstacked : DataFrame or Series
"""
from pandas.core.reshape import unstack
return unstack(self, level)
#----------------------------------------------------------------------
# Time series-related
def diff(self, periods=1):
"""
1st discrete difference of object
Parameters
----------
periods : int, default 1
Periods to shift for forming difference
Returns
-------
diffed : DataFrame
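Examples
--------
A minimal sketch (the first row has nothing to difference against):
>>> df = DataFrame({'A': [1, 3, 6]})
>>> df.diff()   # A -> [NaN, 2, 3]
>>> df.diff(2)  # A -> [NaN, NaN, 5]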
"""
new_data = self._data.diff(n=periods)
return self._constructor(new_data)
#----------------------------------------------------------------------
# Function application
def apply(self, func, axis=0, broadcast=False, raw=False, reduce=None,
args=(), **kwds):
"""
Applies function along input axis of DataFrame.
Objects passed to functions are Series objects having index
either the DataFrame's index (axis=0) or the columns (axis=1).
Return type depends on whether passed function aggregates, or the
reduce argument if the DataFrame is empty.
Parameters
----------
func : function
Function to apply to each column/row
axis : {0, 1}
* 0 : apply function to each column
* 1 : apply function to each row
broadcast : boolean, default False
For aggregation functions, return object of same size with values
propagated
reduce : boolean or None, default None
Try to apply reduction procedures. If the DataFrame is empty,
apply will use reduce to determine whether the result should be a
Series or a DataFrame. If reduce is None (the default), apply's
return value will be guessed by calling func on an empty Series (note:
while guessing, exceptions raised by func will be ignored). If
reduce is True a Series will always be returned, and if False a
DataFrame will always be returned.
raw : boolean, default False
If False, convert each row or column into a Series. If raw=True the
passed function will receive ndarray objects instead. If you are
just applying a NumPy reduction function this will achieve much
better performance
args : tuple
Positional arguments to pass to function in addition to the
array/series
Additional keyword arguments will be passed as keywords to the function
Notes
-----
In the current implementation apply calls func twice on the
first column/row to decide whether it can take a fast or slow
code path. This can lead to unexpected behavior if func has
side-effects, as they will take effect twice for the first
column/row.
Examples
--------
>>> df.apply(numpy.sqrt) # returns DataFrame
>>> df.apply(numpy.sum, axis=0) # equiv to df.sum(0)
>>> df.apply(numpy.sum, axis=1) # equiv to df.sum(1)
See also
--------
DataFrame.applymap: For elementwise operations
Returns
-------
applied : Series or DataFrame
"""
axis = self._get_axis_number(axis)
if kwds or args and not isinstance(func, np.ufunc):
f = lambda x: func(x, *args, **kwds)
else:
f = func
if len(self.columns) == 0 and len(self.index) == 0:
return self._apply_empty_result(func, axis, reduce, *args, **kwds)
if isinstance(f, np.ufunc):
results = f(self.values)
return self._constructor(data=results, index=self.index,
columns=self.columns, copy=False)
else:
if not broadcast:
if not all(self.shape):
return self._apply_empty_result(func, axis, reduce, *args,
**kwds)
if raw and not self._is_mixed_type:
return self._apply_raw(f, axis)
else:
if reduce is None:
reduce = True
return self._apply_standard(f, axis, reduce=reduce)
else:
return self._apply_broadcast(f, axis)
def _apply_empty_result(self, func, axis, reduce, *args, **kwds):
if reduce is None:
reduce = False
try:
reduce = not isinstance(func(_EMPTY_SERIES, *args, **kwds),
Series)
except Exception:
pass
if reduce:
return Series(NA, index=self._get_agg_axis(axis))
else:
return self.copy()
def _apply_raw(self, func, axis):
try:
result = lib.reduce(self.values, func, axis=axis)
except Exception:
result = np.apply_along_axis(func, axis, self.values)
# TODO: mixed type case
if result.ndim == 2:
return DataFrame(result, index=self.index,
columns=self.columns)
else:
return Series(result, index=self._get_agg_axis(axis))
def _apply_standard(self, func, axis, ignore_failures=False, reduce=True):
# skip if we are mixed datelike and trying reduce across axes
# GH6125
if reduce and axis==1 and self._is_mixed_type and self._is_datelike_mixed_type:
reduce=False
# try to reduce first (by default)
# this only matters if the reduction in values is of different dtype
# e.g. if we want to apply to a SparseFrame, then can't directly reduce
if reduce:
try:
# this is the fast-path
values = self.values
dummy = Series(NA, index=self._get_axis(axis),
dtype=values.dtype)
labels = self._get_agg_axis(axis)
result = lib.reduce(values, func, axis=axis, dummy=dummy,
labels=labels)
return Series(result, index=labels)
except Exception:
pass
dtype = object if self._is_mixed_type else None
if axis == 0:
series_gen = (self.icol(i) for i in range(len(self.columns)))
res_index = self.columns
res_columns = self.index
elif axis == 1:
res_index = self.index
res_columns = self.columns
values = self.values
series_gen = (Series.from_array(arr, index=res_columns, name=name, dtype=dtype)
for i, (arr, name) in
enumerate(zip(values, res_index)))
else: # pragma : no cover
raise AssertionError('Axis must be 0 or 1, got %s' % str(axis))
i = None
keys = []
results = {}
if ignore_failures:
successes = []
for i, v in enumerate(series_gen):
try:
results[i] = func(v)
keys.append(v.name)
successes.append(i)
except Exception:
pass
# so will work with MultiIndex
if len(successes) < len(res_index):
res_index = res_index.take(successes)
else:
try:
for i, v in enumerate(series_gen):
results[i] = func(v)
keys.append(v.name)
except Exception as e:
if hasattr(e, 'args'):
# make sure i is defined
if i is not None:
k = res_index[i]
e.args = e.args + ('occurred at index %s' %
com.pprint_thing(k),)
raise
if len(results) > 0 and is_sequence(results[0]):
if not isinstance(results[0], Series):
index = res_columns
else:
index = None
result = self._constructor(data=results, index=index)
result.columns = res_index
if axis == 1:
result = result.T
result = result.convert_objects(copy=False)
else:
result = Series(results)
result.index = res_index
return result
def _apply_broadcast(self, func, axis):
if axis == 0:
target = self
elif axis == 1:
target = self.T
else: # pragma: no cover
raise AssertionError('Axis must be 0 or 1, got %s' % axis)
result_values = np.empty_like(target.values)
columns = target.columns
for i, col in enumerate(columns):
result_values[:, i] = func(target[col])
result = self._constructor(result_values, index=target.index,
columns=target.columns)
if axis == 1:
result = result.T
return result
def applymap(self, func):
"""
Apply a function to a DataFrame that is intended to operate
elementwise, i.e. like doing map(func, series) for each series in the
DataFrame
Parameters
----------
func : function
Python function, returns a single value from a single value
Returns
-------
applied : DataFrame
See also
--------
DataFrame.apply : For operations on rows/columns
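Examples
--------
A minimal sketch, formatting every element as a string:
>>> df = DataFrame({'A': [1.2345, 2.3456], 'B': [3.4567, 4.5678]})
>>> df.applymap(lambda x: '%.2f' % x)  # e.g. 1.2345 -> '1.23'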
"""
# if we have a dtype == 'M8[ns]', provide boxed values
def infer(x):
if com.needs_i8_conversion(x):
f = com.i8_boxer(x)
x = lib.map_infer(_values_from_object(x), f)
return lib.map_infer(_values_from_object(x), func)
return self.apply(infer)
#----------------------------------------------------------------------
# Merging / joining methods
def append(self, other, ignore_index=False, verify_integrity=False):
"""
Append rows of `other` to the end of this frame, returning a new
object. Columns not in this frame are added as new columns.
Parameters
----------
other : DataFrame or Series/dict-like object, or list of these
The data to append.
ignore_index : boolean, default False
If True, do not use the index labels.
verify_integrity : boolean, default False
If True, raise ValueError on creating index with duplicates.
Returns
-------
appended : DataFrame
Notes
-----
If a list of dict/series is passed and the keys are all contained in the
DataFrame's index, the order of the columns in the resulting DataFrame
will be unchanged.
See also
--------
pandas.concat : General function to concatenate DataFrame, Series
or Panel objects
Examples
--------
>>> df = pd.DataFrame([[1, 2], [3, 4]], columns=list('AB'))
>>> df
A B
0 1 2
1 3 4
>>> df2 = pd.DataFrame([[5, 6], [7, 8]], columns=list('AB'))
>>> df.append(df2)
A B
0 1 2
1 3 4
0 5 6
1 7 8
With `ignore_index` set to True:
>>> df.append(df2, ignore_index=True)
A B
0 1 2
1 3 4
2 5 6
3 7 8
"""
if isinstance(other, (Series, dict)):
if isinstance(other, dict):
other = Series(other)
if other.name is None and not ignore_index:
raise TypeError('Can only append a Series if ignore_index=True'
' or if the Series has a name')
index = None if other.name is None else [other.name]
combined_columns = self.columns.tolist() + self.columns.union(other.index).difference(self.columns).tolist()
other = other.reindex(combined_columns, copy=False)
other = DataFrame(other.values.reshape((1, len(other))),
index=index, columns=combined_columns).convert_objects()
if not self.columns.equals(combined_columns):
self = self.reindex(columns=combined_columns)
elif isinstance(other, list) and not isinstance(other[0], DataFrame):
other = DataFrame(other)
if (self.columns.get_indexer(other.columns) >= 0).all():
other = other.ix[:, self.columns]
from pandas.tools.merge import concat
if isinstance(other, (list, tuple)):
to_concat = [self] + other
else:
to_concat = [self, other]
return concat(to_concat, ignore_index=ignore_index,
verify_integrity=verify_integrity)
def join(self, other, on=None, how='left', lsuffix='', rsuffix='',
sort=False):
"""
Join columns with other DataFrame either on index or on a key
column. Efficiently Join multiple DataFrame objects by index at once by
passing a list.
Parameters
----------
other : DataFrame, Series with name field set, or list of DataFrame
Index should be similar to one of the columns in this one. If a
Series is passed, its name attribute must be set, and that will be
used as the column name in the resulting joined DataFrame
on : column name, tuple/list of column names, or array-like
Column(s) to use for joining, otherwise join on index. If multiple
columns given, the passed DataFrame must have a MultiIndex. Can
pass an array as the join key if not already contained in the
calling DataFrame. Like an Excel VLOOKUP operation
how : {'left', 'right', 'outer', 'inner'}
How to handle indexes of the two objects. Default: 'left'
for joining on index, None otherwise
* left: use calling frame's index
* right: use input frame's index
* outer: form union of indexes
* inner: use intersection of indexes
lsuffix : string
Suffix to use from left frame's overlapping columns
rsuffix : string
Suffix to use from right frame's overlapping columns
sort : boolean, default False
Order result DataFrame lexicographically by the join key. If False,
preserves the index order of the calling (left) DataFrame
Notes
-----
on, lsuffix, and rsuffix options are not supported when passing a list
of DataFrame objects
Returns
-------
joined : DataFrame
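Examples
--------
Illustrative calls joining on the index:
>>> left = DataFrame({'A': [1, 2]}, index=['a', 'b'])
>>> right = DataFrame({'B': [3, 4]}, index=['a', 'c'])
>>> left.join(right)               # keep left's index; 'b' gets NaN in B
>>> left.join(right, how='inner')  # only the shared label 'a' survives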
"""
# For SparseDataFrame's benefit
return self._join_compat(other, on=on, how=how, lsuffix=lsuffix,
rsuffix=rsuffix, sort=sort)
def _join_compat(self, other, on=None, how='left', lsuffix='', rsuffix='',
sort=False):
from pandas.tools.merge import merge, concat
if isinstance(other, Series):
if other.name is None:
raise ValueError('Other Series must have a name')
other = DataFrame({other.name: other})
if isinstance(other, DataFrame):
return merge(self, other, left_on=on, how=how,
left_index=on is None, right_index=True,
suffixes=(lsuffix, rsuffix), sort=sort)
else:
if on is not None:
raise ValueError('Joining multiple DataFrames only supported'
' for joining on index')
# join indexes only using concat
if how == 'left':
how = 'outer'
join_axes = [self.index]
else:
join_axes = None
frames = [self] + list(other)
can_concat = all(df.index.is_unique for df in frames)
if can_concat:
return concat(frames, axis=1, join=how, join_axes=join_axes,
verify_integrity=True)
joined = frames[0]
for frame in frames[1:]:
joined = merge(joined, frame, how=how,
left_index=True, right_index=True)
return joined
@Substitution('')
@Appender(_merge_doc, indents=2)
def merge(self, right, how='inner', on=None, left_on=None, right_on=None,
left_index=False, right_index=False, sort=False,
suffixes=('_x', '_y'), copy=True):
from pandas.tools.merge import merge
return merge(self, right, how=how, on=on,
left_on=left_on, right_on=right_on,
left_index=left_index, right_index=right_index, sort=sort,
suffixes=suffixes, copy=copy)
#----------------------------------------------------------------------
# Statistical methods, etc.
def corr(self, method='pearson', min_periods=1):
"""
Compute pairwise correlation of columns, excluding NA/null values
Parameters
----------
method : {'pearson', 'kendall', 'spearman'}
* pearson : standard correlation coefficient
* kendall : Kendall Tau correlation coefficient
* spearman : Spearman rank correlation
min_periods : int, optional
Minimum number of observations required per pair of columns
to have a valid result. Currently only available for pearson
and spearman correlation
Returns
-------
y : DataFrame
"""
numeric_df = self._get_numeric_data()
cols = numeric_df.columns
mat = numeric_df.values
if method == 'pearson':
correl = _algos.nancorr(com._ensure_float64(mat),
minp=min_periods)
elif method == 'spearman':
correl = _algos.nancorr_spearman(com._ensure_float64(mat),
minp=min_periods)
else:
if min_periods is None:
min_periods = 1
mat = mat.T
corrf = nanops.get_corr_func(method)
K = len(cols)
correl = np.empty((K, K), dtype=float)
mask = np.isfinite(mat)
for i, ac in enumerate(mat):
for j, bc in enumerate(mat):
valid = mask[i] & mask[j]
if valid.sum() < min_periods:
c = NA
elif not valid.all():
c = corrf(ac[valid], bc[valid])
else:
c = corrf(ac, bc)
correl[i, j] = c
correl[j, i] = c
return self._constructor(correl, index=cols, columns=cols)
def cov(self, min_periods=None):
"""
Compute pairwise covariance of columns, excluding NA/null values
Parameters
----------
min_periods : int, optional
Minimum number of observations required per pair of columns
to have a valid result.
Returns
-------
y : DataFrame
Notes
-----
`y` contains the covariance matrix of the DataFrame's time series.
The covariance is normalized by N-1 (unbiased estimator).
"""
numeric_df = self._get_numeric_data()
cols = numeric_df.columns
mat = numeric_df.values
if notnull(mat).all():
if min_periods is not None and min_periods > len(mat):
baseCov = np.empty((mat.shape[1], mat.shape[1]))
baseCov.fill(np.nan)
else:
baseCov = np.cov(mat.T)
baseCov = baseCov.reshape((len(cols), len(cols)))
else:
baseCov = _algos.nancorr(com._ensure_float64(mat), cov=True,
minp=min_periods)
return self._constructor(baseCov, index=cols, columns=cols)
def corrwith(self, other, axis=0, drop=False):
"""
Compute pairwise correlation between rows or columns of two DataFrame
objects.
Parameters
----------
other : DataFrame
axis : {0, 1}
0 to compute column-wise, 1 for row-wise
drop : boolean, default False
Drop missing indices from result, default returns union of all
Returns
-------
correls : Series
"""
axis = self._get_axis_number(axis)
if isinstance(other, Series):
return self.apply(other.corr, axis=axis)
this = self._get_numeric_data()
other = other._get_numeric_data()
left, right = this.align(other, join='inner', copy=False)
# mask missing values
left = left + right * 0
right = right + left * 0
if axis == 1:
left = left.T
right = right.T
# demeaned data
ldem = left - left.mean()
rdem = right - right.mean()
num = (ldem * rdem).sum()
dom = (left.count() - 1) * left.std() * right.std()
correl = num / dom
if not drop:
raxis = 1 if axis == 0 else 0
result_index = this._get_axis(raxis).union(other._get_axis(raxis))
correl = correl.reindex(result_index)
return correl
#----------------------------------------------------------------------
# ndarray-like stats methods
def count(self, axis=0, level=None, numeric_only=False):
"""
Return Series with number of non-NA/null observations over requested
axis. Works with non-floating point data as well (detects NaN and None)
Parameters
----------
axis : {0, 1}
0 for row-wise, 1 for column-wise
level : int or level name, default None
If the axis is a MultiIndex (hierarchical), count along a
particular level, collapsing into a DataFrame
numeric_only : boolean, default False
Include only float, int, boolean data
Returns
-------
count : Series (or DataFrame if level specified)
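Examples
--------
A small illustrative example:
>>> df = DataFrame({'A': [1, np.nan, 3], 'B': [4, 5, 6]})
>>> df.count()
A    2
B    3
dtype: int64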
"""
axis = self._get_axis_number(axis)
if level is not None:
return self._count_level(level, axis=axis,
numeric_only=numeric_only)
if numeric_only:
frame = self._get_numeric_data()
else:
frame = self
# GH #423
if len(frame._get_axis(axis)) == 0:
result = Series(0, index=frame._get_agg_axis(axis))
else:
if frame._is_mixed_type:
result = notnull(frame).sum(axis=axis)
else:
counts = notnull(frame.values).sum(axis=axis)
result = Series(counts, index=frame._get_agg_axis(axis))
return result.astype('int64')
def _count_level(self, level, axis=0, numeric_only=False):
if numeric_only:
frame = self._get_numeric_data()
else:
frame = self
count_axis = frame._get_axis(axis)
agg_axis = frame._get_agg_axis(axis)
if not isinstance(count_axis, MultiIndex):
raise TypeError("Can only count levels on hierarchical %s." %
self._get_axis_name(axis))
if frame._is_mixed_type:
# Since we have mixed types, calling notnull(frame.values) might
# upcast everything to object
mask = notnull(frame).values
else:
# But use the speedup when we have homogeneous dtypes
mask = notnull(frame.values)
if axis == 1:
# We're transposing the mask rather than frame to avoid potential
# upcasts to object, which induces a ~20x slowdown
mask = mask.T
if isinstance(level, compat.string_types):
level = count_axis._get_level_number(level)
level_index = count_axis.levels[level]
labels = com._ensure_int64(count_axis.labels[level])
counts = lib.count_level_2d(mask, labels, len(level_index))
result = DataFrame(counts, index=level_index,
columns=agg_axis)
if axis == 1:
# Undo our earlier transpose
return result.T
else:
return result
def _reduce(self, op, name, axis=0, skipna=True, numeric_only=None,
filter_type=None, **kwds):
axis = self._get_axis_number(axis)
f = lambda x: op(x, axis=axis, skipna=skipna, **kwds)
labels = self._get_agg_axis(axis)
# exclude timedelta/datetime unless we are uniform types
if axis == 1 and self._is_mixed_type and self._is_datelike_mixed_type:
numeric_only = True
if numeric_only is None:
try:
values = self.values
result = f(values)
except Exception as e:
# try by-column first
if filter_type is None and axis == 0:
try:
# this can end up with a non-reduction
# but not always. if the types are mixed
# with datelike then need to make sure a series
result = self.apply(f,reduce=False)
if result.ndim == self.ndim:
result = result.iloc[0]
return result
except:
pass
if filter_type is None or filter_type == 'numeric':
data = self._get_numeric_data()
elif filter_type == 'bool':
data = self._get_bool_data()
else: # pragma: no cover
e = NotImplementedError("Handling exception with filter_"
"type %s not implemented."
% filter_type)
raise_with_traceback(e)
result = f(data.values)
labels = data._get_agg_axis(axis)
else:
if numeric_only:
if filter_type is None or filter_type == 'numeric':
data = self._get_numeric_data()
elif filter_type == 'bool':
data = self._get_bool_data()
else: # pragma: no cover
msg = ("Generating numeric_only data with filter_type %s"
"not supported." % filter_type)
raise NotImplementedError(msg)
values = data.values
labels = data._get_agg_axis(axis)
else:
values = self.values
result = f(values)
if is_object_dtype(result.dtype):
try:
if filter_type is None or filter_type == 'numeric':
result = result.astype(np.float64)
elif filter_type == 'bool' and notnull(result).all():
result = result.astype(np.bool_)
except (ValueError, TypeError):
# try to coerce to the original dtypes item by item if we can
if axis == 0:
result = com._coerce_to_dtypes(result, self.dtypes)
return Series(result, index=labels)
def idxmin(self, axis=0, skipna=True):
"""
Return index of first occurrence of minimum over requested axis.
NA/null values are excluded.
Parameters
----------
axis : {0, 1}
0 for row-wise, 1 for column-wise
skipna : boolean, default True
Exclude NA/null values. If an entire row/column is NA, the result
will be NA
Returns
-------
idxmin : Series
Notes
-----
This method is the DataFrame version of ``ndarray.argmin``.
See Also
--------
Series.idxmin
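Examples
--------
A small illustrative example:
>>> df = DataFrame({'A': [2, 1], 'B': [3, 4]}, index=['x', 'y'])
>>> df.idxmin()
A    y
B    x
dtype: object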
"""
axis = self._get_axis_number(axis)
indices = nanops.nanargmin(self.values, axis=axis, skipna=skipna)
index = self._get_axis(axis)
result = [index[i] if i >= 0 else NA for i in indices]
return Series(result, index=self._get_agg_axis(axis))
def idxmax(self, axis=0, skipna=True):
"""
Return index of first occurrence of maximum over requested axis.
NA/null values are excluded.
Parameters
----------
axis : {0, 1}
0 for row-wise, 1 for column-wise
skipna : boolean, default True
Exclude NA/null values. If an entire row/column is NA, the result
will be NA.
Returns
-------
idxmax : Series
Notes
-----
This method is the DataFrame version of ``ndarray.argmax``.
See Also
--------
Series.idxmax
"""
axis = self._get_axis_number(axis)
indices = nanops.nanargmax(self.values, axis=axis, skipna=skipna)
index = self._get_axis(axis)
result = [index[i] if i >= 0 else NA for i in indices]
return Series(result, index=self._get_agg_axis(axis))
def _get_agg_axis(self, axis_num):
""" let's be explict about this """
if axis_num == 0:
return self.columns
elif axis_num == 1:
return self.index
else:
raise ValueError('Axis must be 0 or 1 (got %r)' % axis_num)
def mode(self, axis=0, numeric_only=False):
"""
Gets the mode(s) of each column (or each row, for ``axis=1``). Empty
if nothing has 2+ occurrences. Adds a row for each mode per label,
fills in gaps with nan.
Note that there could be multiple values returned for the selected
axis (when more than one item share the maximum frequency), which is the
reason why a dataframe is returned. If you want to impute missing values
with the mode in a dataframe ``df``, you can just do this:
``df.fillna(df.mode().iloc[0])``
Parameters
----------
axis : {0, 1, 'index', 'columns'} (default 0)
* 0/'index' : get mode of each column
* 1/'columns' : get mode of each row
numeric_only : boolean, default False
if True, only apply to numeric columns
Returns
-------
modes : DataFrame (sorted)
Examples
--------
>>> df = pd.DataFrame({'A': [1, 2, 1, 2, 1, 2, 3]})
>>> df.mode()
A
0 1
1 2
"""
data = self if not numeric_only else self._get_numeric_data()
f = lambda s: s.mode()
return data.apply(f, axis=axis)
def quantile(self, q=0.5, axis=0, numeric_only=True):
"""
Return values at the given quantile over requested axis, a la
numpy.percentile.
Parameters
----------
q : float or array-like, default 0.5 (50% quantile)
0 <= q <= 1, the quantile(s) to compute
axis : {0, 1}
0 for row-wise, 1 for column-wise
Returns
-------
quantiles : Series or DataFrame
If ``q`` is an array, a DataFrame will be returned where the
index is ``q``, the columns are the columns of self, and the
values are the quantiles.
If ``q`` is a float, a Series will be returned where the
index is the columns of self and the values are the quantiles.
Examples
--------
>>> df = DataFrame(np.array([[1, 1], [2, 10], [3, 100], [4, 100]]),
columns=['a', 'b'])
>>> df.quantile(.1)
a 1.3
b 3.7
dtype: float64
>>> df.quantile([.1, .5])
a b
0.1 1.3 3.7
0.5 2.5 55.0
"""
per = np.asarray(q) * 100
if not com.is_list_like(per):
per = [per]
q = [q]
squeeze = True
else:
squeeze = False
def f(arr, per):
if arr._is_datelike_mixed_type:
values = _values_from_object(arr).view('i8')
else:
values = arr.astype(float)
values = values[notnull(values)]
if len(values) == 0:
return NA
else:
return _quantile(values, per)
data = self._get_numeric_data() if numeric_only else self
if axis == 1:
data = data.T
# need to know which cols are timestamp going in so that we can
# map timestamp over them after getting the quantile.
is_dt_col = data.dtypes.map(com.is_datetime64_dtype)
is_dt_col = is_dt_col[is_dt_col].index
quantiles = [[f(vals, x) for x in per]
for (_, vals) in data.iteritems()]
result = DataFrame(quantiles, index=data._info_axis, columns=q).T
if len(is_dt_col) > 0:
result[is_dt_col] = result[is_dt_col].applymap(lib.Timestamp)
if squeeze:
if result.shape == (1, 1):
result = result.T.iloc[:, 0] # don't want scalar
else:
result = result.T.squeeze()
result.name = None # For groupby, so it can set an index name
return result
def rank(self, axis=0, numeric_only=None, method='average',
na_option='keep', ascending=True, pct=False):
"""
Compute numerical data ranks (1 through n) along axis. Equal values are
assigned a rank that is the average of the ranks of those values
Parameters
----------
axis : {0, 1}, default 0
Ranks over columns (0) or rows (1)
numeric_only : boolean, default None
Include only float, int, boolean data
method : {'average', 'min', 'max', 'first', 'dense'}
* average: average rank of group
* min: lowest rank in group
* max: highest rank in group
* first: ranks assigned in order they appear in the array
* dense: like 'min', but rank always increases by 1 between groups
na_option : {'keep', 'top', 'bottom'}
* keep: leave NA values where they are
* top: smallest rank if ascending
* bottom: smallest rank if descending
ascending : boolean, default True
False for ranks by high (1) to low (N)
pct : boolean, default False
Computes percentage rank of data
Returns
-------
ranks : DataFrame
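Examples
--------
Ties share the average of their ranks under the default 'average' method:
>>> df = DataFrame({'A': [1, 2, 2]})
>>> df.rank()
     A
0  1.0
1  2.5
2  2.5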
"""
axis = self._get_axis_number(axis)
if numeric_only is None:
try:
ranks = algos.rank(self.values, axis=axis, method=method,
ascending=ascending, na_option=na_option,
pct=pct)
return self._constructor(ranks, index=self.index,
columns=self.columns)
except TypeError:
numeric_only = True
if numeric_only:
data = self._get_numeric_data()
else:
data = self
ranks = algos.rank(data.values, axis=axis, method=method,
ascending=ascending, na_option=na_option, pct=pct)
return self._constructor(ranks, index=data.index, columns=data.columns)
def to_timestamp(self, freq=None, how='start', axis=0, copy=True):
"""
Cast to DatetimeIndex of timestamps, at *beginning* of period
Parameters
----------
freq : string, default frequency of PeriodIndex
Desired frequency
how : {'s', 'e', 'start', 'end'}
Convention for converting period to timestamp; start of period
vs. end
axis : {0, 1} default 0
The axis to convert (the index by default)
copy : boolean, default True
If false then underlying input data is not copied
Returns
-------
df : DataFrame with DatetimeIndex
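Examples
--------
A minimal sketch (the example period range is an assumption):
>>> prng = pd.period_range('2000Q1', periods=2, freq='Q')
>>> df = DataFrame({'A': [1, 2]}, index=prng)
>>> ts_df = df.to_timestamp()  # index is now a DatetimeIndex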
"""
new_data = self._data
if copy:
new_data = new_data.copy()
axis = self._get_axis_number(axis)
if axis == 0:
new_data.set_axis(1, self.index.to_timestamp(freq=freq, how=how))
elif axis == 1:
new_data.set_axis(0, self.columns.to_timestamp(freq=freq, how=how))
else: # pragma: no cover
raise AssertionError('Axis must be 0 or 1. Got %s' % str(axis))
return self._constructor(new_data)
def to_period(self, freq=None, axis=0, copy=True):
"""
Convert DataFrame from DatetimeIndex to PeriodIndex with desired
frequency (inferred from index if not passed)
Parameters
----------
freq : string, default None
Desired frequency (inferred from the index if not passed)
axis : {0, 1}, default 0
The axis to convert (the index by default)
copy : boolean, default True
If False then underlying input data is not copied
Returns
-------
df : DataFrame with PeriodIndex
"""
new_data = self._data
if copy:
new_data = new_data.copy()
axis = self._get_axis_number(axis)
if axis == 0:
new_data.set_axis(1, self.index.to_period(freq=freq))
elif axis == 1:
new_data.set_axis(0, self.columns.to_period(freq=freq))
else: # pragma: no cover
raise AssertionError('Axis must be 0 or 1. Got %s' % str(axis))
return self._constructor(new_data)
def isin(self, values):
"""
Return boolean DataFrame showing whether each element in the
DataFrame is contained in values.
Parameters
----------
values : iterable, Series, DataFrame or dictionary
The result will only be true at a location if all the
labels match. If `values` is a Series, that's the index. If
`values` is a dictionary, the keys must be the column names,
which must match. If `values` is a DataFrame,
then both the index and column labels must match.
Returns
-------
DataFrame of booleans
Examples
--------
When ``values`` is a list:
>>> df = DataFrame({'A': [1, 2, 3], 'B': ['a', 'b', 'f']})
>>> df.isin([1, 3, 12, 'a'])
A B
0 True True
1 False False
2 True False
When ``values`` is a dict:
>>> df = DataFrame({'A': [1, 2, 3], 'B': [1, 4, 7]})
>>> df.isin({'A': [1, 3], 'B': [4, 7, 12]})
A B
0 True False # Note that B didn't match the 1 here.
1 False True
2 True True
When ``values`` is a Series or DataFrame:
>>> df = DataFrame({'A': [1, 2, 3], 'B': ['a', 'b', 'f']})
>>> other = DataFrame({'A': [1, 3, 3, 2], 'B': ['e', 'f', 'f', 'e']})
>>> df.isin(other)
A B
0 True False
1 False False # Column A in `other` has a 3, but not at index 1.
2 True True
"""
if isinstance(values, dict):
from collections import defaultdict
from pandas.tools.merge import concat
values = defaultdict(list, values)
return concat((self.iloc[:, [i]].isin(values[col])
for i, col in enumerate(self.columns)), axis=1)
elif isinstance(values, Series):
if not values.index.is_unique:
raise ValueError("ValueError: cannot compute isin with"
" a duplicate axis.")
return self.eq(values.reindex_like(self), axis='index')
elif isinstance(values, DataFrame):
if not (values.columns.is_unique and values.index.is_unique):
raise ValueError("ValueError: cannot compute isin with"
" a duplicate axis.")
return self.eq(values.reindex_like(self))
else:
if not is_list_like(values):
raise TypeError("only list-like or dict-like objects are"
" allowed to be passed to DataFrame.isin(), "
"you passed a "
"{0!r}".format(type(values).__name__))
return DataFrame(lib.ismember(self.values.ravel(),
set(values)).reshape(self.shape),
self.index,
self.columns)
#----------------------------------------------------------------------
# Deprecated stuff
def combineAdd(self, other):
"""
Add two DataFrame objects and do not propagate
NaN values, so if for a (column, time) one frame is missing a
value, it will default to the other frame's value (which might
be NaN as well)
Parameters
----------
other : DataFrame
Returns
-------
DataFrame
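Examples
--------
A small sketch (values assumed); the NaN in ``df1`` falls back to the
other frame's value because of ``fill_value=0``:
>>> df1 = DataFrame({'A': [1, np.nan]})
>>> df2 = DataFrame({'A': [3, 4]})
>>> combined = df1.combineAdd(df2)  # column A becomes [4.0, 4.0]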
"""
return self.add(other, fill_value=0.)
def combineMult(self, other):
"""
Multiply two DataFrame objects and do not propagate NaN values, so if
for a (column, time) one frame is missing a value, it will default to
the other frame's value (which might be NaN as well)
Parameters
----------
other : DataFrame
Returns
-------
DataFrame
"""
return self.mul(other, fill_value=1.)
DataFrame._setup_axes(['index', 'columns'], info_axis=1, stat_axis=0,
axes_are_reversed=True, aliases={'rows': 0})
DataFrame._add_numeric_operations()
_EMPTY_SERIES = Series([])
def _arrays_to_mgr(arrays, arr_names, index, columns, dtype=None):
"""
Segregate Series based on type and coerce into matrices.
Needs to handle a lot of exceptional cases.
"""
# figure out the index, if necessary
if index is None:
index = extract_index(arrays)
else:
index = _ensure_index(index)
# don't force copy because getting jammed in an ndarray anyway
arrays = _homogenize(arrays, index, dtype)
# from BlockManager perspective
axes = [_ensure_index(columns), _ensure_index(index)]
return create_block_manager_from_arrays(arrays, arr_names, axes)
def extract_index(data):
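# infer a common index from a mix of Series, dicts and raw arrays;
# raises if lengths/indexes are inconsistent or only scalars were passed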
from pandas.core.index import _union_indexes
index = None
if len(data) == 0:
index = Index([])
elif len(data) > 0:
raw_lengths = []
indexes = []
have_raw_arrays = False
have_series = False
have_dicts = False
for v in data:
if isinstance(v, Series):
have_series = True
indexes.append(v.index)
elif isinstance(v, dict):
have_dicts = True
indexes.append(list(v.keys()))
elif is_list_like(v) and getattr(v, 'ndim', 1) == 1:
have_raw_arrays = True
raw_lengths.append(len(v))
if not indexes and not raw_lengths:
raise ValueError('If using all scalar values, you must pass'
' an index')
if have_series or have_dicts:
index = _union_indexes(indexes)
if have_raw_arrays:
lengths = list(set(raw_lengths))
if len(lengths) > 1:
raise ValueError('arrays must all be same length')
if have_dicts:
raise ValueError('Mixing dicts with non-Series may lead to '
'ambiguous ordering.')
if have_series:
if lengths[0] != len(index):
msg = ('array length %d does not match index length %d'
% (lengths[0], len(index)))
raise ValueError(msg)
else:
index = Index(np.arange(lengths[0]))
return _ensure_index(index)
def _prep_ndarray(values, copy=True):
if not isinstance(values, (np.ndarray, Series, Index)):
if len(values) == 0:
return np.empty((0, 0), dtype=object)
def convert(v):
return com._possibly_convert_platform(v)
# we could have a 1-dim or 2-dim list here
# this is equiv of np.asarray, but does object conversion
# and platform dtype preservation
try:
if com.is_list_like(values[0]) or hasattr(values[0], 'len'):
values = np.array([convert(v) for v in values])
else:
values = convert(values)
except:
values = convert(values)
else:
# drop subclass info, do not copy data
values = np.asarray(values)
if copy:
values = values.copy()
if values.ndim == 1:
values = values.reshape((values.shape[0], 1))
elif values.ndim != 2:
raise ValueError('Must pass 2-d input')
return values
def _to_arrays(data, columns, coerce_float=False, dtype=None):
"""
Return list of arrays, columns
"""
if isinstance(data, DataFrame):
if columns is not None:
arrays = [data.icol(i).values for i, col in enumerate(data.columns)
if col in columns]
else:
columns = data.columns
arrays = [data.icol(i).values for i in range(len(columns))]
return arrays, columns
if not len(data):
if isinstance(data, np.ndarray):
columns = data.dtype.names
if columns is not None:
return [[]] * len(columns), columns
return [], [] # columns if columns is not None else []
if isinstance(data[0], (list, tuple)):
return _list_to_arrays(data, columns, coerce_float=coerce_float,
dtype=dtype)
elif isinstance(data[0], collections.Mapping):
return _list_of_dict_to_arrays(data, columns,
coerce_float=coerce_float,
dtype=dtype)
elif isinstance(data[0], Series):
return _list_of_series_to_arrays(data, columns,
coerce_float=coerce_float,
dtype=dtype)
elif isinstance(data[0], Categorical):
if columns is None:
columns = _default_index(len(data))
return data, columns
elif (isinstance(data, (np.ndarray, Series, Index))
and data.dtype.names is not None):
columns = list(data.dtype.names)
arrays = [data[k] for k in columns]
return arrays, columns
else:
# last ditch effort
data = lmap(tuple, data)
return _list_to_arrays(data, columns,
coerce_float=coerce_float,
dtype=dtype)
def _masked_rec_array_to_mgr(data, index, columns, dtype, copy):
""" extract from a masked rec array and create the manager """
# essentially process a record array then fill it
fill_value = data.fill_value
fdata = ma.getdata(data)
if index is None:
index = _get_names_from_index(fdata)
if index is None:
index = _default_index(len(data))
index = _ensure_index(index)
if columns is not None:
columns = _ensure_index(columns)
arrays, arr_columns = _to_arrays(fdata, columns)
# fill if needed
new_arrays = []
for fv, arr, col in zip(fill_value, arrays, arr_columns):
mask = ma.getmaskarray(data[col])
if mask.any():
arr, fv = _maybe_upcast(arr, fill_value=fv, copy=True)
arr[mask] = fv
new_arrays.append(arr)
# create the manager
arrays, arr_columns = _reorder_arrays(new_arrays, arr_columns, columns)
if columns is None:
columns = arr_columns
mgr = _arrays_to_mgr(arrays, arr_columns, index, columns)
if copy:
mgr = mgr.copy()
return mgr
def _reorder_arrays(arrays, arr_columns, columns):
# reorder according to the columns
if (columns is not None and len(columns) and arr_columns is not None and
len(arr_columns)):
indexer = _ensure_index(
arr_columns).get_indexer(columns)
arr_columns = _ensure_index(
[arr_columns[i] for i in indexer])
arrays = [arrays[i] for i in indexer]
return arrays, arr_columns
def _list_to_arrays(data, columns, coerce_float=False, dtype=None):
if len(data) > 0 and isinstance(data[0], tuple):
content = list(lib.to_object_array_tuples(data).T)
else:
# list of lists
content = list(lib.to_object_array(data).T)
return _convert_object_array(content, columns, dtype=dtype,
coerce_float=coerce_float)
def _list_of_series_to_arrays(data, columns, coerce_float=False, dtype=None):
from pandas.core.index import _get_combined_index
if columns is None:
columns = _get_combined_index([
s.index for s in data if getattr(s, 'index', None) is not None
])
indexer_cache = {}
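# cache indexers by index identity so a shared index is aligned only once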
aligned_values = []
for s in data:
index = getattr(s, 'index', None)
if index is None:
index = _default_index(len(s))
if id(index) in indexer_cache:
indexer = indexer_cache[id(index)]
else:
indexer = indexer_cache[id(index)] = index.get_indexer(columns)
values = _values_from_object(s)
aligned_values.append(com.take_1d(values, indexer))
values = np.vstack(aligned_values)
if values.dtype == np.object_:
content = list(values.T)
return _convert_object_array(content, columns, dtype=dtype,
coerce_float=coerce_float)
else:
return values.T, columns
def _list_of_dict_to_arrays(data, columns, coerce_float=False, dtype=None):
if columns is None:
gen = (list(x.keys()) for x in data)
columns = lib.fast_unique_multiple_list_gen(gen)
# assure that they are of the base dict class and not of derived
# classes
data = [(type(d) is dict) and d or dict(d) for d in data]
content = list(lib.dicts_to_array(data, list(columns)).T)
return _convert_object_array(content, columns, dtype=dtype,
coerce_float=coerce_float)
def _convert_object_array(content, columns, coerce_float=False, dtype=None):
if columns is None:
columns = _default_index(len(content))
else:
if len(columns) != len(content): # pragma: no cover
# caller's responsibility to check for this...
raise AssertionError('%d columns passed, passed data had %s '
'columns' % (len(columns), len(content)))
# provide soft conversion of object dtypes
def convert(arr):
if dtype != object and dtype != np.object:
arr = lib.maybe_convert_objects(arr, try_float=coerce_float)
arr = com._possibly_cast_to_datetime(arr, dtype)
return arr
arrays = [ convert(arr) for arr in content ]
return arrays, columns
def _get_names_from_index(data):
index = lrange(len(data))
has_some_name = any([getattr(s, 'name', None) is not None for s in data])
if not has_some_name:
return index
count = 0
for i, s in enumerate(data):
n = getattr(s, 'name', None)
if n is not None:
index[i] = n
else:
index[i] = 'Unnamed %d' % count
count += 1
return index
def _homogenize(data, index, dtype=None):
from pandas.core.series import _sanitize_array
oindex = None
homogenized = []
for v in data:
if isinstance(v, Series):
if dtype is not None:
v = v.astype(dtype)
if v.index is not index:
# Forces alignment. No need to copy data since we
# are putting it into an ndarray later
v = v.reindex(index, copy=False)
else:
if isinstance(v, dict):
if oindex is None:
oindex = index.astype('O')
if type(v) == dict:
# fast cython method
v = lib.fast_multiget(v, oindex.values, default=NA)
else:
v = lib.map_infer(oindex.values, v.get)
v = _sanitize_array(v, index, dtype=dtype, copy=False,
raise_cast_failure=False)
homogenized.append(v)
return homogenized
def _from_nested_dict(data):
# TODO: this should be seriously cythonized
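# transposes {index: {column: value}} into {column: {index: value}},
# e.g. {'r1': {'A': 1}} -> {'A': {'r1': 1}}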
new_data = OrderedDict()
for index, s in compat.iteritems(data):
for col, v in compat.iteritems(s):
new_data[col] = new_data.get(col, OrderedDict())
new_data[col][index] = v
return new_data
def _put_str(s, space):
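# render s as a string truncated/padded to exactly `space` characters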
return ('%s' % s)[:space].ljust(space)
#----------------------------------------------------------------------
# Add plotting methods to DataFrame
import pandas.tools.plotting as gfx
DataFrame.plot = gfx.plot_frame
DataFrame.hist = gfx.hist_frame
@Appender(_shared_docs['boxplot'] % _shared_doc_kwargs)
def boxplot(self, column=None, by=None, ax=None, fontsize=None,
rot=0, grid=True, figsize=None, layout=None, return_type=None,
**kwds):
import pandas.tools.plotting as plots
import matplotlib.pyplot as plt
ax = plots.boxplot(self, column=column, by=by, ax=ax,
fontsize=fontsize, grid=grid, rot=rot,
figsize=figsize, layout=layout, return_type=return_type,
**kwds)
plt.draw_if_interactive()
return ax
DataFrame.boxplot = boxplot
ops.add_flex_arithmetic_methods(DataFrame, **ops.frame_flex_funcs)
ops.add_special_arithmetic_methods(DataFrame, **ops.frame_special_funcs)
if __name__ == '__main__':
import nose
nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'],
exit=False)
| if isinstance(index, PeriodIndex):
values = index.asobject.values
elif (isinstance(index, DatetimeIndex) and
index.tz is not None):
values = index.asobject
else:
values = index.values
if values.dtype == np.object_:
values = lib.maybe_convert_objects(values)
# if we have the labels, extract the values with a mask
if labels is not None:
mask = labels == -1
values = values.take(labels)
if mask.any():
values, changed = com._maybe_upcast_putmask(values,
mask, np.nan)
return values |
run_language_modeling.py | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Fine-tuning the library models for language modeling on a text file (GPT, GPT-2, BERT, RoBERTa).
GPT and GPT-2 are fine-tuned using a causal language modeling (CLM) loss while BERT and RoBERTa are fine-tuned
using a masked language modeling (MLM) loss.
"""
import logging
import math
import os
from dataclasses import dataclass, field
from typing import Optional
from transformers import (
CONFIG_MAPPING,
MODEL_WITH_LM_HEAD_MAPPING,
AutoConfig,
AutoModelWithLMHead,
AutoTokenizer,
DataCollatorForLanguageModeling,
HfArgumentParser,
LineByLineTextDataset,
PreTrainedTokenizer,
TextDataset,
Trainer,
TrainingArguments,
set_seed,
)
logger = logging.getLogger(__name__)
MODEL_CONFIG_CLASSES = list(MODEL_WITH_LM_HEAD_MAPPING.keys())
MODEL_TYPES = tuple(conf.model_type for conf in MODEL_CONFIG_CLASSES)
@dataclass
class ModelArguments:
"""
Arguments pertaining to which model/config/tokenizer we are going to fine-tune, or train from scratch.
"""
model_name_or_path: Optional[str] = field(
default=None,
metadata={
"help": "The model checkpoint for weights initialization. Leave None if you want to train a model from scratch."
},
)
model_type: Optional[str] = field(
default=None,
metadata={"help": "If training from scratch, pass a model type from the list: " + ", ".join(MODEL_TYPES)},
)
config_name: Optional[str] = field(
default=None, metadata={"help": "Pretrained config name or path if not the same as model_name"}
)
tokenizer_name: Optional[str] = field(
default=None, metadata={"help": "Pretrained tokenizer name or path if not the same as model_name"}
)
cache_dir: Optional[str] = field(
default=None, metadata={"help": "Where do you want to store the pretrained models downloaded from s3"}
)
use_fast: bool = field(default=False, metadata={"help": "Set this flag to use fast tokenization."})
@dataclass
class DataTrainingArguments:
"""
Arguments pertaining to what data we are going to input our model for training and eval.
"""
train_data_file: Optional[str] = field(
default=None, metadata={"help": "The input training data file (a text file)."}
)
eval_data_file: Optional[str] = field(
default=None,
metadata={"help": "An optional input evaluation data file to evaluate the perplexity on (a text file)."},
)
line_by_line: bool = field(
default=False,
metadata={"help": "Whether distinct lines of text in the dataset are to be handled as distinct sequences."},
)
mlm: bool = field(
default=False, metadata={"help": "Train with masked-language modeling loss instead of language modeling."}
)
mlm_probability: float = field(
default=0.15, metadata={"help": "Ratio of tokens to mask for masked language modeling loss"}
)
block_size: int = field(
default=-1,
metadata={
"help": "Optional input sequence length after tokenization."
"The training dataset will be truncated in block of this size for training."
"Default to the model max input length for single sentence inputs (take into account special tokens)."
},
)
overwrite_cache: bool = field(
default=False, metadata={"help": "Overwrite the cached training and evaluation sets"}
)
def get_dataset(args: DataTrainingArguments, tokenizer: PreTrainedTokenizer, evaluate=False):
file_path = args.eval_data_file if evaluate else args.train_data_file
if args.line_by_line:
return LineByLineTextDataset(tokenizer=tokenizer, file_path=file_path, block_size=args.block_size)
else:
return TextDataset(
tokenizer=tokenizer, file_path=file_path, block_size=args.block_size, overwrite_cache=args.overwrite_cache
)
def main():
# See all possible arguments in src/transformers/training_args.py
# or by passing the --help flag to this script.
# We now keep distinct sets of args, for a cleaner separation of concerns.
parser = HfArgumentParser((ModelArguments, DataTrainingArguments, TrainingArguments))
model_args, data_args, training_args = parser.parse_args_into_dataclasses()
if data_args.eval_data_file is None and training_args.do_eval:
raise ValueError(
"Cannot do evaluation without an evaluation data file. Either supply a file to --eval_data_file "
"or remove the --do_eval argument."
)
if (
os.path.exists(training_args.output_dir)
and os.listdir(training_args.output_dir)
and training_args.do_train
and not training_args.overwrite_output_dir
):
raise ValueError(
f"Output directory ({training_args.output_dir}) already exists and is not empty. Use --overwrite_output_dir to overcome."
)
# Setup logging
logging.basicConfig(
format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
datefmt="%m/%d/%Y %H:%M:%S",
level=logging.INFO if training_args.local_rank in [-1, 0] else logging.WARN,
)
logger.warning(
"Process rank: %s, device: %s, n_gpu: %s, distributed training: %s, 16-bits training: %s",
training_args.local_rank,
training_args.device,
training_args.n_gpu,
bool(training_args.local_rank != -1),
training_args.fp16,
)
logger.info("Training/evaluation parameters %s", training_args)
# Set seed
set_seed(training_args.seed)
# Load pretrained model and tokenizer
#
# Distributed training:
# The .from_pretrained methods guarantee that only one local process can concurrently
# download model & vocab.
if model_args.config_name:
config = AutoConfig.from_pretrained(model_args.config_name, cache_dir=model_args.cache_dir)
elif model_args.model_name_or_path:
config = AutoConfig.from_pretrained(model_args.model_name_or_path, cache_dir=model_args.cache_dir)
else:
config = CONFIG_MAPPING[model_args.model_type]()
logger.warning("You are instantiating a new config instance from scratch.")
if model_args.tokenizer_name:
tokenizer = AutoTokenizer.from_pretrained(model_args.tokenizer_name, cache_dir=model_args.cache_dir, use_fast=model_args.use_fast)
elif model_args.model_name_or_path:
tokenizer = AutoTokenizer.from_pretrained(model_args.model_name_or_path, cache_dir=model_args.cache_dir)
else:
raise ValueError(
"You are instantiating a new tokenizer from scratch. This is not supported, but you can do it from another script, save it,"
"and load it from here, using --tokenizer_name"
)
if model_args.model_name_or_path:
model = AutoModelWithLMHead.from_pretrained(
model_args.model_name_or_path,
from_tf=bool(".ckpt" in model_args.model_name_or_path),
config=config,
cache_dir=model_args.cache_dir,
)
else:
logger.info("Training new model from scratch")
model = AutoModelWithLMHead.from_config(config)
model.resize_token_embeddings(len(tokenizer))
if config.model_type in ["bert", "roberta", "distilbert", "camembert"] and not data_args.mlm:
raise ValueError(
"BERT and RoBERTa-like models do not have LM heads but masked LM heads. They must be run using the --mlm "
"flag (masked language modeling)."
)
if data_args.block_size <= 0:
data_args.block_size = tokenizer.max_len
# Our input block size will be the max possible for the model
else:
data_args.block_size = min(data_args.block_size, tokenizer.max_len)
# Get datasets
train_dataset = get_dataset(data_args, tokenizer=tokenizer) if training_args.do_train else None
eval_dataset = get_dataset(data_args, tokenizer=tokenizer, evaluate=True) if training_args.do_eval else None
data_collator = DataCollatorForLanguageModeling(
tokenizer=tokenizer, mlm=data_args.mlm, mlm_probability=data_args.mlm_probability
)
# Initialize our Trainer
trainer = Trainer(
model=model,
args=training_args,
data_collator=data_collator,
train_dataset=train_dataset,
eval_dataset=eval_dataset,
prediction_loss_only=True,
)
# Training
if training_args.do_train:
model_path = (
model_args.model_name_or_path
if model_args.model_name_or_path is not None and os.path.isdir(model_args.model_name_or_path)
else None
)
trainer.train(model_path=model_path)
trainer.save_model() | # so that you can share your model easily on huggingface.co/models =)
if trainer.is_world_master():
tokenizer.save_pretrained(training_args.output_dir)
# Evaluation
results = {}
if training_args.do_eval:
logger.info("*** Evaluate ***")
eval_output = trainer.evaluate()
perplexity = math.exp(eval_output["eval_loss"])
result = {"perplexity": perplexity}
output_eval_file = os.path.join(training_args.output_dir, "eval_results_lm.txt")
if trainer.is_world_master():
with open(output_eval_file, "w") as writer:
logger.info("***** Eval results *****")
for key in sorted(result.keys()):
logger.info(" %s = %s", key, str(result[key]))
writer.write("%s = %s\n" % (key, str(result[key])))
results.update(result)
return results
def _mp_fn(index):
# For xla_spawn (TPUs)
main()
if __name__ == "__main__":
main() | # For convenience, we also re-save the tokenizer to the same directory, |
runtests.py | import sys
from django.conf import settings
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ROOT_URLCONF='whisk_tutorial.urls',
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'whisk_tutorial',),
MIDDLEWARE_CLASSES=(
'django.contrib.sessions.middleware.SessionMiddleware',
))
from django.test.runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
failures = test_runner.run_tests(['whisk_tutorial', ])
if failures: | sys.exit(failures) | |
function.rs | use sexpr_ir::gast::{GAst, constant::Constant, list::List};
use crate::{ast::{Function, TopLevel}, error::{CompilerError, bad_syntax, incomplete_expr, invalid_expr_type, invalid_list_tail}, sexpr_to_ast::symbol_from_sexpr};
use super::FromSexpr;
impl FromSexpr<List, Function> for Function {
fn from_sexpr(i: &List) -> Result<Function, Vec<CompilerError>> {
let mut error_buffer = vec![];
// check that the list has no dotted tail
if i.1.is_some() {
error_buffer.push(invalid_list_tail(&*i));
}
let mut iter = i.0.iter();
// get the def head symbol (defun/lambda)
let def_headle = iter.next().ok_or_else(|| vec![incomplete_expr(&*i)])?;
let (def_headle, pos) = if let GAst::Const(Constant::Sym(sym)) = def_headle {
(sym.0.clone(), sym.1.clone())
} else {
error_buffer.push(invalid_expr_type(i, ()));
return Err(error_buffer);
};
// process params
let prarms = iter
.next()
.ok_or_else(|| vec![incomplete_expr(&*i)])?
.get_list()
.ok_or_else(|| vec![invalid_expr_type(i, ())])?;
let List(prarms, extend_prarms) = (*prarms).clone();
let mut prarms = prarms.iter();
let function_name = if *def_headle == "defun" {
let name = prarms.next().ok_or_else(|| vec![incomplete_expr(&*i)])?;
let name = symbol_from_sexpr(name);
if let Err(e) = name.clone() {
error_buffer.push(e);
}
Some(name)
} else if *def_headle == "lambda" {
None
} else {
error_buffer.push(bad_syntax(&*i));
return Err(error_buffer);
};
// params
let prarms = prarms.map(symbol_from_sexpr).fold(vec![], |mut pair, x| {
if let Ok(x) = x {
pair.push(x);
} else if let Err(e) = x {
error_buffer.push(e);
}
pair
});
// process the extended (rest) param
let extend_prarms = extend_prarms.map(|x| symbol_from_sexpr(&x));
if let Some(Err(e)) = extend_prarms.clone() {
error_buffer.push(e);
}
// process bodies
let bodys: Vec<_> = iter.collect();
/*
if bodys.is_empty() {
error_buffer.push(CompilerError::IncompleteExpr(i.to_string()));
return Err(error_buffer);
}
// */
let bodys = bodys
.iter()
.cloned()
.map(TopLevel::from_sexpr)
.fold(vec![], |mut pair, x| {
if let Ok(x) = x {
pair.push(x);
} else if let Err(mut e) = x {
error_buffer.append(&mut e);
}
pair
});
if !error_buffer.is_empty() {
Err(error_buffer)
} else {
let r = Function {
name: function_name.map(|x| x.unwrap()),
params: prarms, | Ok(r)
}
}
} | extend_params: extend_prarms.map(|x| x.unwrap()),
body: bodys,
pos
}; |
index.js | // of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
/* eslint-disable max-len */
// Initialize globals, check version
import './lib/init';
// Import shaderlib to make sure shader modules are initialized
import './shaderlib';
// Core Library
export {COORDINATE_SYSTEM} from './lib/constants';
// Experimental Pure JS (non-React) bindings
export {default as Deck} from './lib/deck';
export {default as LayerManager} from './lib/layer-manager';
export {default as AttributeManager} from './lib/attribute-manager';
export {default as Layer} from './lib/layer';
export {default as CompositeLayer} from './lib/composite-layer';
// Viewports
export {default as Viewport} from './viewports/viewport';
export {default as WebMercatorViewport} from './viewports/web-mercator-viewport';
// Shader modules
export {default as project} from './shaderlib/project/project';
export {default as project64} from './shaderlib/project64/project64';
export {default as lighting} from './shaderlib/lighting/lighting';
export {default as View} from './views/view';
export {default as MapView} from './views/map-view';
export {default as FirstPersonView} from './views/first-person-view';
export {default as ThirdPersonView} from './views/third-person-view';
export {default as OrbitView} from './views/orbit-view';
export {default as PerspectiveView} from './views/perspective-view';
export {default as OrthographicView} from './views/orthographic-view';
// Controllers
export {default as Controller} from './controllers/controller';
export {default as MapController} from './controllers/map-controller';
// Experimental Controllers
export {default as _FirstPersonController} from './controllers/first-person-controller';
export {default as _OrbitController} from './controllers/orbit-controller';
// EXPERIMENTAL EXPORTS
// Experimental Effects (non-React) bindings
export {default as _EffectManager} from './experimental/lib/effect-manager';
export {default as _Effect} from './experimental/lib/effect';
export {default as _ReflectionEffect} from './experimental/reflection-effect/reflection-effect';
// Experimental Transitions
export {TRANSITION_EVENTS as _TRANSITION_EVENTS} from './controllers/transition-manager';
export {default as _LinearInterpolator} from './transitions/linear-interpolator';
export {default as _ViewportFlyToInterpolator} from './transitions/viewport-fly-to-interpolator';
// Layer utilities
export {default as log} from './utils/log';
import {flattenVertices, fillArray} from './utils/flatten'; // Export? move to luma.gl or math.gl?
import {default as BinSorter} from './utils/bin-sorter';
import {defaultColorRange} from './utils/color-utils';
import {linearScale, getLinearScale, quantizeScale, getQuantizeScale} from './utils/scale-utils';
export {default as _GPUGridAggregator} from './experimental/utils/gpu-grid-aggregator';
// Exports for layers
// (Experimental features may change in minor version bumps, use at your own risk)
export const experimental = {
BinSorter,
linearScale,
getLinearScale,
quantizeScale,
getQuantizeScale,
defaultColorRange,
flattenVertices,
fillArray
}; | // Copyright (c) 2015 - 2017 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy |
|
team.component.ts | import { Component } from '@angular/core'
@Component({
selector: 'ngx-team',
styleUrls: ['./team.component.scss'],
templateUrl: './team.component.html',
})
export class | {
}
| TeamComponent |
prover_task_runner.rs | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
//! Prover task runner that runs multiple instances of the prover task and returns
//! as soon as the fastest instance finishes.
use crate::cli::Options;
use async_trait::async_trait;
use futures::{future::FutureExt, pin_mut, select};
use log::debug;
use rand::Rng;
use regex::Regex;
use std::{
process::Output,
sync::{
mpsc::{channel, Sender},
Arc,
},
};
use tokio::{
process::Command,
sync::{broadcast, broadcast::Receiver, Semaphore},
};
#[derive(Debug, Clone)]
enum BroadcastMsg {
Stop,
}
const MAX_PERMITS: usize = usize::MAX >> 4;
#[async_trait]
pub trait ProverTask {
type TaskResult: Send + 'static;
type TaskId: Send + Copy + 'static;
/// Initialize the task runner given the number of instances.
fn init(&mut self, num_instances: usize) -> Vec<Self::TaskId>;
/// Run the task with task_id. This function will be called from one of the worker threads.
async fn run(&mut self, task_id: Self::TaskId, sem: Arc<Semaphore>) -> Self::TaskResult;
/// Returns whether the task result is considered successful.
fn is_success(&self, task_result: &Self::TaskResult) -> bool;
}
pub struct ProverTaskRunner();
impl ProverTaskRunner {
/// Run `num_instances` instances of the prover `task` and returns the task id
/// as well as the result of the fastest running instance.
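/// ```ignore
/// // Illustrative sketch; `task` is assumed to implement `ProverTask + Clone + Send`.
/// let (task_id, result) = ProverTaskRunner::run_tasks(task, 4, false);
/// ```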
pub fn run_tasks<T>(
mut task: T,
num_instances: usize,
sequential: bool,
) -> (T::TaskId, T::TaskResult)
where
T: ProverTask + Clone + Send + 'static,
{
let rt = tokio::runtime::Builder::new()
.threaded_scheduler()
.enable_all()
.build()
.unwrap();
let sem = if sequential {
Arc::new(Semaphore::new(1))
} else {
Arc::new(Semaphore::new(MAX_PERMITS))
};
// Create channels for communication.
let (worker_tx, main_rx) = channel();
let (main_tx, _): (
tokio::sync::broadcast::Sender<BroadcastMsg>,
Receiver<BroadcastMsg>,
) = broadcast::channel(num_instances);
// Initialize the prover tasks.
let task_ids = task.init(num_instances);
for task_id in task_ids {
let s = sem.clone();
let send_n = worker_tx.clone();
let worker_rx = main_tx.subscribe();
let cloned_task = task.clone();
// Spawn a task worker for each task_id.
rt.spawn(async move {
Self::run_task_until_cancelled(cloned_task, task_id, send_n, worker_rx, s).await;
});
}
let mut num_working_instances = num_instances;
// Listens until one of the workers finishes.
loop {
// Result received from one worker.
let res = main_rx.recv();
if let Ok((task_id, result)) = res {
if num_working_instances == 1 {
return (task_id, result);
} else if task.is_success(&result) {
// Result is successful. Broadcast to other workers
// so they can stop working.
let _ = main_tx.send(BroadcastMsg::Stop);
return (task_id, result);
}
debug! {"previous instance failed, waiting for another worker to report..."}
num_working_instances -= 1;
}
}
}
// Run two async tasks, listening on broadcast channel and running the task, until
// either the task finishes running, or a stop message is received.
async fn run_task_until_cancelled<T>(
mut task: T,
task_id: T::TaskId,
tx: Sender<(T::TaskId, T::TaskResult)>,
rx: Receiver<BroadcastMsg>,
sem: Arc<Semaphore>,
) where
T: ProverTask,
{
let task_fut = task.run(task_id, sem).fuse();
let watchdog_fut = Self::watchdog(rx).fuse();
pin_mut!(task_fut, watchdog_fut);
select! {
_ = watchdog_fut => {
// A stop message is received.
}
res = task_fut => {
// Task finishes running, send the result to parent thread.
let _ = tx.send((task_id, res));
},
}
}
/// Waits for a stop message from the parent thread.
async fn watchdog(mut rx: Receiver<BroadcastMsg>) { | let _ = rx.recv().await;
}
}
#[derive(Debug, Clone)]
pub struct RunBoogieWithSeeds {
pub options: Options,
pub boogie_file: String,
}
#[async_trait]
impl ProverTask for RunBoogieWithSeeds {
type TaskResult = Output;
type TaskId = usize;
fn init(&mut self, num_instances: usize) -> Vec<Self::TaskId> {
// If we are running only one Boogie instance, use the default random seed.
if num_instances == 1 {
return vec![self.options.backend.random_seed];
}
let mut rng = rand::thread_rng();
// Otherwise generate a list of random numbers to use as seeds.
(0..num_instances)
.map(|_| rng.gen::<u8>() as usize)
.collect()
}
async fn run(&mut self, task_id: Self::TaskId, sem: Arc<Semaphore>) -> Self::TaskResult {
let _guard = sem.acquire().await;
let args = self.get_boogie_command(task_id);
debug!("runing Boogie command with seed {}", task_id);
Command::new(&args[0])
.args(&args[1..])
.kill_on_drop(true)
.output()
.await
.unwrap()
}
fn is_success(&self, task_result: &Self::TaskResult) -> bool {
if !task_result.status.success() {
return false;
}
let output = String::from_utf8_lossy(&task_result.stdout);
self.contains_compilation_error(&output) || !self.contains_timeout(&output)
}
}
impl RunBoogieWithSeeds {
/// Returns command line to call boogie.
pub fn get_boogie_command(&mut self, seed: usize) -> Vec<String> {
self.options
.backend
.boogie_flags
.push(format!("-proverOpt:O:smt.random_seed={}", seed));
self.options.get_boogie_command(&self.boogie_file)
}
/// Returns whether the output string contains any Boogie compilation errors.
fn contains_compilation_error(&self, output: &str) -> bool {
let regex =
Regex::new(r"(?m)^.*\((?P<line>\d+),(?P<col>\d+)\).*(Error:|error:).*$").unwrap();
regex.is_match(output)
}
/// Returns whether the output string contains any Boogie timeouts/inconclusiveness.
fn contains_timeout(&self, output: &str) -> bool {
let regex =
Regex::new(r"(?m)^.*\((?P<line>\d+),(?P<col>\d+)\).*Verification.*(inconclusive|out of resource|timed out).*$")
.unwrap();
regex.is_match(output)
}
} | |
counter.js | /**
* Counter metric
*/
'use strict';
const util = require('util');
const type = 'counter';
const { hashObject, isObject, getLabels, removeLabels } = require('./util');
const { validateLabel } = require('./validation');
const { Metric } = require('./metric');
class Counter extends Metric {
/**
* Increment counter
* @param {object} labels - Object with labels for the series you want to increment
* @param {Number} value - Value to increment by; if omitted, increments by 1
* @returns {void}
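* @example
* // Illustrative usage (metric name and labels are assumptions):
* // const c = new Counter({ name: 'jobs_total', help: 'total jobs', labelNames: ['status'] });
* // c.inc({ status: 'ok' }, 2);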
*/
inc(labels, value) {
let hash;
if (isObject(labels)) {
hash = hashObject(labels);
validateLabel(this.labelNames, labels);
} else {
value = labels;
labels = {};
}
if (value && !Number.isFinite(value)) {
throw new TypeError(`Value is not a valid number: ${util.format(value)}`);
}
if (value < 0) {
throw new Error('It is not possible to decrease a counter');
}
if (value === null || value === undefined) value = 1;
setValue(this.hashMap, value, labels, hash);
}
/**
* Reset counter
* @returns {void}
*/
reset() {
this.hashMap = {}; | setValue(this.hashMap, 0);
}
}
async get() {
if (this.collect) {
const v = this.collect();
if (v instanceof Promise) await v;
}
return {
help: this.help,
name: this.name,
type,
values: Object.values(this.hashMap),
aggregator: this.aggregator,
};
}
labels(...args) {
const labels = getLabels(this.labelNames, args) || {};
return {
inc: this.inc.bind(this, labels),
};
}
remove(...args) {
const labels = getLabels(this.labelNames, args) || {};
validateLabel(this.labelNames, labels);
return removeLabels.call(this, this.hashMap, labels);
}
}
function setValue(hashMap, value, labels = {}, hash = '') {
if (hashMap[hash]) {
hashMap[hash].value += value;
} else {
hashMap[hash] = { value, labels };
}
return hashMap;
}
module.exports = Counter; | if (this.labelNames.length === 0) { |
dirty_set_test.go | // Copyright 2018 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package fsutil
import (
"reflect"
"testing"
"gvisor.googlesource.com/gvisor/pkg/sentry/memmap"
"gvisor.googlesource.com/gvisor/pkg/sentry/usermem"
)
func TestDirtySet(t *testing.T) | {
var set DirtySet
set.MarkDirty(memmap.MappableRange{0, 2 * usermem.PageSize})
set.KeepDirty(memmap.MappableRange{usermem.PageSize, 2 * usermem.PageSize})
set.MarkClean(memmap.MappableRange{0, 2 * usermem.PageSize})
want := &DirtySegmentDataSlices{
Start: []uint64{usermem.PageSize},
End: []uint64{2 * usermem.PageSize},
Values: []DirtyInfo{{Keep: true}},
}
if got := set.ExportSortedSlices(); !reflect.DeepEqual(got, want) {
t.Errorf("set:\n\tgot %v,\n\twant %v", got, want)
}
} |
|
selectors.test.ts | /*
* <<
* Davinci
* ==
* Copyright (C) 2016 - 2017 EDP
* ==
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* >>
*/
import {
selectDisplay,
makeSelectCurrentLayersOperationInfo,
makeSelectLayersBySlide,
makeSelectLayerIdsBySlide,
makeSelectCurrentLayerList,
selectCurrentLayers,
makeSelectCurrentLayerIds,
makeSelectSlideLayerContextValue,
makeSelectCurrentLayersMaxIndex,
makeSelectCurrentSelectedLayerList,
makeSelectCurrentEditLayerOperationInfo,
makeSelectCurrentSelectedLayerIds,
makeSelectCurrentOperatingLayerList,
makeSelectCurrentOtherLayerList,
makeSelectCurrentOperateItemParams,
makeSelectCurrentDisplayWidgets,
makeSelectClipboardLayers,
makeSelectCurrentDisplayShareToken,
makeSelectCurrentDisplayPasswordShareToken,
makeSelectCurrentDisplayPasswordSharePassword,
makeSelectCurrentDisplayAuthorizedShareToken,
makeSelectSharePanel,
makeSelectDisplayLoading,
makeSelectEditorBaselines
} from 'app/containers/Display/selectors'
import { displayInitialState } from 'app/containers/Display/reducer'
import {
mockGraphLayerId,
mockSlideId,
mockEditing,
mockDisplayState,
mockGraphLayerFormed,
mockGraphLayerInfo
} from './fixtures'
describe('displaySelector', () => {
let state
beforeEach(() => { | describe('selectDisplay', () => {
it('should select the display state', () => {
expect(selectDisplay(state)).toEqual(state.display)
})
})
describe('makeSelectCurrentLayersOperationInfo', () => {
let selectCurrentLayersOperationInfo = makeSelectCurrentLayersOperationInfo()
it('should select the current layer operation info', () => {
expect(selectCurrentLayersOperationInfo(mockDisplayState)).toEqual(
mockDisplayState.display.slideLayersOperationInfo[
mockDisplayState.display.currentSlideId
]
)
})
})
describe('makeSelectLayersBySlide', () => {
let selectLayersBySlide
beforeEach(() => {
selectLayersBySlide = makeSelectLayersBySlide()
selectLayersBySlide.resetRecomputations()
})
test('should return bizTable state', () => {
expect(selectLayersBySlide(state, mockSlideId)).toEqual(
state.display.slideLayers[mockSlideId]
)
})
})
describe('makeSelectLayerIdsBySlide', () => {
let selectLayerIdsBySlide
beforeEach(() => {
selectLayerIdsBySlide = makeSelectLayerIdsBySlide()
selectLayerIdsBySlide.resetRecomputations()
})
test('should return select layer ids by slide', () => {
expect(selectLayerIdsBySlide(mockDisplayState, mockSlideId)).toEqual(
Object.keys(mockDisplayState.display.slideLayers[mockSlideId]).map(
(id) => +id
)
)
})
})
describe('makeSelectCurrentLayerList', () => {
const selectCurrentLayerList = makeSelectCurrentLayerList()
test('should return select current layer list', () => {
expect(selectCurrentLayerList(mockDisplayState)).toEqual(
Object.values(
mockDisplayState.display.slideLayers[
mockDisplayState.display.currentSlideId
]
).sort((l1, l2) => l2.index - l1.index)
)
})
})
describe('makeSelectCurrentLayerIds', () => {
const selectCurrentLayerIds = makeSelectCurrentLayerIds()
test('should return select current layer ids', () => {
expect(selectCurrentLayerIds(mockDisplayState)).toEqual(
Object.keys(
mockDisplayState.display.slideLayers[
mockDisplayState.display.currentSlideId
]
).map((id) => +id)
)
})
})
describe('makeSelectSlideLayerContextValue', () => {
const selectSlideLayerContextValue = makeSelectSlideLayerContextValue()
test('should return select slide layer context value', () => {
expect(
selectSlideLayerContextValue(
mockDisplayState,
mockSlideId,
mockGraphLayerId,
mockEditing
)
).toEqual({
layer: mockGraphLayerFormed,
layerInfo: mockGraphLayerInfo
})
})
})
describe('makeSelectCurrentLayersMaxIndex', () => {
const selectCurrentLayersMaxIndex = makeSelectCurrentLayersMaxIndex()
const currentLayers =
mockDisplayState.display.slideLayers[
mockDisplayState.display.currentSlideId
]
const currentLayerList = Object.values(currentLayers).sort(
(l1, l2) => l2.index - l1.index
)
test('should return select current layer max index', () => {
expect(selectCurrentLayersMaxIndex(mockDisplayState)).toEqual(
currentLayerList[currentLayerList.length - 1].index
)
})
})
describe('makeSelectCurrentSelectedLayerList', () => {
const selectCurrentSelectedLayerList = makeSelectCurrentSelectedLayerList()
const currentLayers =
mockDisplayState.display.slideLayers[
mockDisplayState.display.currentSlideId
]
const currentLayerList = Object.values(currentLayers).sort(
(l1, l2) => l2.index - l1.index
)
const currentLayersOperationInfo =
mockDisplayState.display.slideLayersOperationInfo[
mockDisplayState.display.currentSlideId
]
test('should return select current select layer list', () => {
expect(selectCurrentSelectedLayerList(mockDisplayState)).toEqual(
currentLayerList.filter(
({ id }) => currentLayersOperationInfo[id].selected
)
)
})
})
describe('makeSelectCurrentEditLayerOperationInfo', () => {
const selectCurrentEditLayerOperationInfo = makeSelectCurrentEditLayerOperationInfo()
const currentEditOperationInfo =
mockDisplayState.display.slideLayersOperationInfo[
mockDisplayState.display.currentSlideId
]
test('should return select current edit layer operation info', () => {
expect(selectCurrentEditLayerOperationInfo(mockDisplayState)).toEqual(
Object.values(currentEditOperationInfo).filter(
(layerInfo) => layerInfo.editing
)
)
})
})
describe('makeSelectCurrentSelectedLayerIds', () => {
const selectCurrentSelectedLayerIds = makeSelectCurrentSelectedLayerIds()
const currentLayersOperationInfo =
mockDisplayState.display.slideLayersOperationInfo[
mockDisplayState.display.currentSlideId
]
test('should return select current select layer ids', () => {
expect(selectCurrentSelectedLayerIds(mockDisplayState)).toEqual(
Object.keys(currentLayersOperationInfo)
.filter((id) => currentLayersOperationInfo[+id].selected)
.map((id) => +id)
)
})
})
describe('makeSelectCurrentOperatingLayerList', () => {
const selectCurrentOperatingLayerList = makeSelectCurrentOperatingLayerList()
const currentLayers =
mockDisplayState.display.slideLayers[
mockDisplayState.display.currentSlideId
]
test('should return select current operating layer list', () => {
expect(
selectCurrentOperatingLayerList(mockDisplayState, mockGraphLayerId)
).toEqual([currentLayers[mockGraphLayerId]])
})
})
describe('makeSelectCurrentOtherLayerList', () => {
const selectCurrentOtherLayerList = makeSelectCurrentOtherLayerList()
const currentLayers =
mockDisplayState.display.slideLayers[
mockDisplayState.display.currentSlideId
]
test('should return select current other layer list', () => {
expect(
selectCurrentOtherLayerList(mockDisplayState, mockGraphLayerId)
).toEqual([currentLayers[mockGraphLayerId]])
})
})
describe('makeSelectCurrentOperateItemParams', () => {
const selectCurrentOperateItemParams = makeSelectCurrentOperateItemParams()
test('should return select current operate item params', () => {
expect(selectCurrentOperateItemParams(state)).toEqual(
state.display.operateItemParams
)
})
})
describe('makeSelectCurrentDisplayWidgets', () => {
const selectCurrentDisplayWidgets = makeSelectCurrentDisplayWidgets()
test('should return select current display widgets', () => {
expect(selectCurrentDisplayWidgets(state)).toEqual(
state.display.currentDisplayWidgets
)
})
})
describe('makeSelectClipboardLayers', () => {
const selectClipboardLayers = makeSelectClipboardLayers()
test('should return select clipboard layer list', () => {
expect(selectClipboardLayers(state)).toEqual(
state.display.clipboardLayers
)
})
})
describe('makeSelectCurrentDisplayShareToken', () => {
const selectCurrentDisplayShareToken = makeSelectCurrentDisplayShareToken()
test('should return select current display share token', () => {
expect(selectCurrentDisplayShareToken(state)).toEqual(
state.display.currentDisplayShareToken
)
})
})
describe('makeSelectCurrentDisplayPasswordShareToken', () => {
const selectCurrentDisplayPasswordShareToken = makeSelectCurrentDisplayPasswordShareToken()
test('should return select current display password share token', () => {
expect(selectCurrentDisplayPasswordShareToken(state)).toEqual(
state.display.currentDisplayPasswordShareToken
)
})
})
describe('makeSelectCurrentDisplayPasswordSharePassword', () => {
const selectCurrentDisplayPasswordSharePassword = makeSelectCurrentDisplayPasswordSharePassword()
test('should return select current display password share password', () => {
expect(selectCurrentDisplayPasswordSharePassword(state)).toEqual(
state.display.currentDisplayPasswordPassword
)
})
})
describe('makeSelectCurrentDisplayAuthorizedShareToken', () => {
const selectCurrentDisplayAuthorizedShareToken = makeSelectCurrentDisplayAuthorizedShareToken()
test('should return select current display authorized share token', () => {
expect(selectCurrentDisplayAuthorizedShareToken(state)).toEqual(
state.display.currentDisplayAuthorizedShareToken
)
})
})
describe('makeSelectSharePanel', () => {
const selectSharePanel = makeSelectSharePanel()
test('should return select share panel', () => {
expect(selectSharePanel(state)).toEqual(state.display.sharePanel)
})
})
describe('makeSelectDisplayLoading', () => {
const selectDisplayLoading = makeSelectDisplayLoading()
test('should return select display loading', () => {
expect(selectDisplayLoading(state)).toEqual(state.display.loading)
})
})
describe('makeSelectEditorBaselines', () => {
const selectEditorBaselines = makeSelectEditorBaselines()
test('should return select editor baselines', () => {
expect(selectEditorBaselines(state)).toEqual(
state.display.editorBaselines
)
})
})
describe('selectCurrentLayers', () => {
test('should return select current layers', () => {
expect(selectCurrentLayers(mockDisplayState)).toEqual(
mockDisplayState.display.slideLayers[
mockDisplayState.display.currentSlideId
]
)
})
})
}) | state = {
display: displayInitialState
}
}) |
if_let.rs | // `if let` is best thought of as a single expression
// let optional = Some(7);
// match optional {
// Some(i) => {
// println!("This is a really long string and `{:?}`", i);
// // ^ an extra level of indentation is needed just to
// // destructure `i` out of the Option type.
// },
// _ => {},
// // ^ required because `match` must be exhaustive.
// // Doesn't it look redundant?
// };
fn main() {
// All have type `Option<i32>`
let number = Some(7);
let letter: Option<i32> = None;
let emoticon: Option<i32> = None;
// The `if let` construct reads: "if `let` destructures `number` into
// `Some(i)`, evaluate the block (`{}`).
if let Some(i) = number {
println!("Matched {:?}!", i);
}
// If you need to specify a failure, use an else:
if let Some(i) = letter {
println!("Matched {:?}!", i);
} else {
// Destructure failed. Change to the failure case.
println!("Didn't match a number. Let's go with a letter!");
}
// Provide an altered failing condition.
let i_like_letters = false;
if let Some(i) = emoticon {
println!("Matched {:?}!", i);
// Destructure failed. Evaluate an `else if` condition to see if the
// alternate failure branch should be taken:
} else if i_like_letters {
println!("Didn't match a number. Let's go with a letter!");
} else {
// The condition evaluated false. This branch is the default:
println!("I don't like letters. Let's go with an emoticon :)!");
}
let_like_match()
}
enum Foo {
Bar,
Baz,
Qux(u32),
}
fn let_like_match() {
// Create example variables
let a = Foo::Bar;
let b = Foo::Baz;
let c = Foo::Qux(100);
// Variable a matches Foo::Bar
if let Foo::Bar = a {
println!("a is foobar");
}
// Variable b does not match Foo::Bar
// So this will print nothing
if let Foo::Bar = b {
println!("b is foobar");
}
// Variable c matches Foo::Qux which has a value
// Similar to Some() in the previous example
if let Foo::Qux(value) = c {
println!("c is {}", value);
}
// Binding also works with `if let`
if let Foo::Qux(_value @ 100) = c {
println!("c is one hundred");
}
// `if let` can also match enum variants that don't carry a value
enum Foo2 {
Bar,
};
let a = Foo2::Bar;
// `if Foo2::Bar == a` fails to compile
// because enum instances cannot be compared for equality (no PartialEq)
// if Foo2::Bar == a {
#[allow(irrefutable_let_patterns)]
if let Foo2::Bar = a {
// ^-- this causes a compile-time error. Use `if let` instead.
println!("a is foobar");
}
match a {
Foo2::Bar => {
println!("match a is foobar");
}
}
}
| ||
__main__.py | import sys
import importlib
import logging
from . import __version__
from .args import HelpfulArgumentParser
# List of all subcommands. A module of the given name must exist and define
# add_arguments() and main() functions. Documentation is taken from the first
# line of the module’s docstring.
COMMANDS = [
'phase',
'phaseg',
'stats',
'compare',
'hapcut2vcf',
'unphase',
'haplotag'
]
logger = logging.getLogger(__name__)
class NiceFormatter(logging.Formatter):
"""
Do not prefix "INFO:" to info-level log messages (but do it for all other
levels).
Based on http://stackoverflow.com/a/9218261/715090 .
"""
def format(self, record):
if record.levelno != logging.INFO:
record.msg = '{}: {}'.format(record.levelname, record.msg)
return super().format(record)
def setup_logging(debug):
"""
Set up logging. If debug is True, then DEBUG level messages are printed.
"""
handler = logging.StreamHandler()
handler.setFormatter(NiceFormatter())
root = logging.getLogger()
root.addHandler(handler)
root.setLevel(logging.DEBUG if debug else logging.INFO)
def ensure_pysam_version():
from pysam import __version__ as pysam_version
from distutils.version import LooseVersion
if LooseVersion(pysam_version) < LooseVersion("0.8.1"):
sys.exit("WhatsHap requires pysam >= 0.8.1")
def ma | rgv=sys.argv[1:]):
ensure_pysam_version()
parser = HelpfulArgumentParser(description=__doc__, prog='whatshap')
parser.add_argument('--version', action='version', version='%(prog)s ' + __version__)
parser.add_argument('--debug', action='store_true', default=False,
help='Print debug messages')
subparsers = parser.add_subparsers()
for command_name in COMMANDS:
module = importlib.import_module('.' + command_name, 'whatshap')
subparser = subparsers.add_parser(command_name,
help=module.__doc__.split('\n')[1], description=module.__doc__)
subparser.set_defaults(module=module, subparser=subparser)
module.add_arguments(subparser)
args = parser.parse_args(argv)
setup_logging(args.debug)
if not hasattr(args, 'module'):
parser.error('Please provide the name of a subcommand to run')
else:
module = args.module
if hasattr(args.module, 'validate'):
subparser = args.subparser
args.module.validate(args, subparser)
del args.subparser
del args.module
del args.debug
module.main(args)
if __name__ == '__main__':
main()
| in(a |
api_op_DeleteClientVpnEndpoint.go | // Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
package ec2
import (
"context"
"github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go-v2/internal/awsutil"
)
// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteClientVpnEndpointRequest
type DeleteClientVpnEndpointInput struct {
_ struct{} `type:"structure"`
// The ID of the Client VPN to be deleted.
//
// ClientVpnEndpointId is a required field
ClientVpnEndpointId *string `type:"string" required:"true"`
// Checks whether you have the required permissions for the action, without
// actually making the request, and provides an error response. If you have
// the required permissions, the error response is DryRunOperation. Otherwise,
// it is UnauthorizedOperation.
DryRun *bool `type:"boolean"`
}
// String returns the string representation
func (s DeleteClientVpnEndpointInput) String() string {
return awsutil.Prettify(s)
}
// Validate inspects the fields of the type to determine if they are valid.
func (s *DeleteClientVpnEndpointInput) Validate() error {
invalidParams := aws.ErrInvalidParams{Context: "DeleteClientVpnEndpointInput"}
	if s.ClientVpnEndpointId == nil {
		invalidParams.Add(aws.NewErrParamRequired("ClientVpnEndpointId"))
	}
if invalidParams.Len() > 0 {
return invalidParams
}
return nil
}
// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteClientVpnEndpointResult
type DeleteClientVpnEndpointOutput struct {
_ struct{} `type:"structure"`
// The current state of the Client VPN endpoint.
Status *VpnEndpointStatus `locationName:"status" type:"structure"`
}
// String returns the string representation
func (s DeleteClientVpnEndpointOutput) String() string {
return awsutil.Prettify(s)
}
const opDeleteClientVpnEndpoint = "DeleteClientVpnEndpoint"
// DeleteClientVpnEndpointRequest returns a request value for making API operation for
// Amazon Elastic Compute Cloud.
//
// Deletes the specified Client VPN endpoint. You must disassociate all target
// networks before you can delete a Client VPN endpoint.
//
// // Example sending a request using DeleteClientVpnEndpointRequest.
// req := client.DeleteClientVpnEndpointRequest(params)
// resp, err := req.Send(context.TODO())
// if err == nil {
// fmt.Println(resp)
// }
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteClientVpnEndpoint
func (c *Client) DeleteClientVpnEndpointRequest(input *DeleteClientVpnEndpointInput) DeleteClientVpnEndpointRequest {
op := &aws.Operation{
Name: opDeleteClientVpnEndpoint,
HTTPMethod: "POST",
HTTPPath: "/",
}
if input == nil {
input = &DeleteClientVpnEndpointInput{}
}
req := c.newRequest(op, input, &DeleteClientVpnEndpointOutput{})
return DeleteClientVpnEndpointRequest{Request: req, Input: input, Copy: c.DeleteClientVpnEndpointRequest}
}
// DeleteClientVpnEndpointRequest is the request type for the
// DeleteClientVpnEndpoint API operation.
type DeleteClientVpnEndpointRequest struct {
*aws.Request
Input *DeleteClientVpnEndpointInput
Copy func(*DeleteClientVpnEndpointInput) DeleteClientVpnEndpointRequest
}
// Send marshals and sends the DeleteClientVpnEndpoint API request.
func (r DeleteClientVpnEndpointRequest) Send(ctx context.Context) (*DeleteClientVpnEndpointResponse, error) {
r.Request.SetContext(ctx)
err := r.Request.Send()
if err != nil {
return nil, err
}
resp := &DeleteClientVpnEndpointResponse{
DeleteClientVpnEndpointOutput: r.Request.Data.(*DeleteClientVpnEndpointOutput),
response: &aws.Response{Request: r.Request},
}
return resp, nil
}
// DeleteClientVpnEndpointResponse is the response type for the
// DeleteClientVpnEndpoint API operation.
type DeleteClientVpnEndpointResponse struct {
*DeleteClientVpnEndpointOutput
response *aws.Response
}
// SDKResponseMetdata returns the response metadata for the
// DeleteClientVpnEndpoint request.
func (r *DeleteClientVpnEndpointResponse) SDKResponseMetdata() *aws.Response {
return r.response
}
doc_extractor.go | // Copyright 2020 VMware, Inc.
// SPDX-License-Identifier: Apache-2.0
package workspace
import (
"fmt"
"github.com/vmware-tanzu/carvel-ytt/pkg/template"
"github.com/vmware-tanzu/carvel-ytt/pkg/yamlmeta"
"github.com/vmware-tanzu/carvel-ytt/pkg/yamltemplate"
)
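// DocExtractor splits a YAML document set into documents that carry a given
// template annotation and documents that do not.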
type DocExtractor struct {
DocSet *yamlmeta.DocumentSet
}
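// Extract returns the documents annotated with annName, the remaining
// documents, and an error if the annotation is used on a non-document node
// or more than once per document.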
func (v DocExtractor) Extract(annName template.AnnotationName) ([]*yamlmeta.Document,
[]*yamlmeta.Document, error) {
err := v.checkNonDocs(v.DocSet, annName)
if err != nil {
return nil, nil, err
}
matchedDocs, nonMatchedDocs, err := v.extract(v.DocSet, annName)
if err != nil {
return nil, nil, err
}
return matchedDocs, nonMatchedDocs, nil
}
func (v DocExtractor) extract(docSet *yamlmeta.DocumentSet,
annName template.AnnotationName) ([]*yamlmeta.Document, []*yamlmeta.Document, error) {
var matchedDocs []*yamlmeta.Document
var nonMatchedDocs []*yamlmeta.Document
for _, doc := range docSet.Items {
var hasMatchingAnn bool
for _, comment := range doc.GetComments() {
			// TODO potentially use template.NewAnnotations(doc).Has(yttoverlay.AnnotationMatch);
			// however, if the doc was not processed by the template, it won't have any annotations set
ann, err := yamltemplate.NewTemplateAnnotationFromYAMLComment(comment, doc.GetPosition(), yamltemplate.MetasOpts{IgnoreUnknown: true})
if err != nil {
return nil, nil, err
}
if ann.Name == annName {
				if hasMatchingAnn {
					return nil, nil, fmt.Errorf("%s annotation may only be used once per document", annName)
				}
hasMatchingAnn = true
}
}
if hasMatchingAnn {
matchedDocs = append(matchedDocs, doc)
} else {
nonMatchedDocs = append(nonMatchedDocs, doc)
}
}
return matchedDocs, nonMatchedDocs, nil
}
func (v DocExtractor) checkNonDocs(val interface{}, annName template.AnnotationName) error {
node, ok := val.(yamlmeta.Node)
if !ok {
return nil
}
for _, comment := range node.GetComments() {
ann, err := yamltemplate.NewTemplateAnnotationFromYAMLComment(comment, node.GetPosition(), yamltemplate.MetasOpts{IgnoreUnknown: true})
if err != nil {
return err
}
if ann.Name == annName {
// TODO check for annotation emptiness
_, isDoc := node.(*yamlmeta.Document)
if !isDoc {
errMsg := "Found @%s on %s (%s); only documents (---) can be annotated with @%s"
return fmt.Errorf(errMsg, annName, yamlmeta.TypeName(node), node.GetPosition().AsCompactString(), annName)
}
}
}
for _, childVal := range node.GetValues() {
err := v.checkNonDocs(childVal, annName)
if err != nil {
return err
}
}
return nil
}
Welcome.tsx | import React, { useState, useEffect } from 'react';
import { useGlobal } from 'reactn';
import Auth from '../auth/Auth';
import { Link, Redirect } from 'react-router-dom';
import { connect } from 'react-redux';
import { bindActionCreators } from 'redux';
import { IState, IWelcomeStrings, User, Organization } from '../model';
import localStrings from '../selector/localize';
import { withData } from 'react-orbitjs';
import { Schema, KeyMap, QueryBuilder } from '@orbit/data';
import Store from '@orbit/store';
import { Theme, withStyles, WithStyles, Button } from '@material-ui/core';
import Paper from '@material-ui/core/Paper';
import Typography from '@material-ui/core/Typography';
import TranscriberBar from '../components/TranscriberBar';
import * as action from '../actions';
const styles = (theme: Theme) => ({
root: {
width: '100%',
},
container: {
display: 'flex',
justifyContent: 'center'
},
paper: theme.mixins.gutters({
paddingTop: 16,
paddingBottom: 16,
marginTop: theme.spacing.unit * 3,
width: '30%',
display: 'flex',
flexDirection: 'column',
alignContent: 'center',
[theme.breakpoints.down('md')]: {
width: '100%',
},
}),
dialogHeader: theme.mixins.gutters({
display: 'flex',
flexDirection: 'row',
justifyContent: 'center'
}),
actions: theme.mixins.gutters({
paddingTop: 16,
paddingBottom: 16,
marginTop: theme.spacing.unit * 3,
display: 'flex',
flexDirection: 'row',
justifyContent: 'center'
}),
text: theme.mixins.gutters({
paddingTop: theme.spacing.unit * 2,
textAlign: 'center',
}),
button: theme.mixins.gutters({
marginRight: theme.spacing.unit
}),
});
interface IStateProps {
t: IWelcomeStrings;
orbitLoaded: boolean;
};
interface IDispatchProps {
fetchLocalization: typeof action.fetchLocalization;
setLanguage: typeof action.setLanguage;
fetchOrbitData: typeof action.fetchOrbitData;
};
interface IRecordProps {
users: Array<User>;
}
interface IProps extends IStateProps, IRecordProps, IDispatchProps, WithStyles<typeof styles>{
auth: Auth
};
export function Welcome(props: IProps) {
const { classes, orbitLoaded, auth, t, users } = props;
const { fetchOrbitData, fetchLocalization, setLanguage } = props;
const { isAuthenticated } = auth;
const [dataStore] = useGlobal('dataStore');
const [schema] = useGlobal('schema');
const [keyMap] = useGlobal('keyMap');
const [user, setUser] = useGlobal('user');
const [organization] = useGlobal('organization');
/* eslint-disable @typescript-eslint/no-unused-vars */
const [initials, setInitials] = useGlobal('initials');
const [orgName, setOrgName] = useState('');
/* eslint-enable @typescript-eslint/no-unused-vars */
useEffect(() => {
setLanguage(navigator.language.split('-')[0]);
fetchLocalization();
/* eslint-disable-next-line react-hooks/exhaustive-deps */
}, [])
useEffect(() => {
if (user === null) {
if (users.length === 1) {
setUser(users[0].id)
setInitials(users[0].attributes.name.trim().split(' ').map((s: string) => s.slice(0,1).toLocaleUpperCase()).join(''))
}
}
/* eslint-disable-next-line react-hooks/exhaustive-deps */
}, [user, users])
useEffect(() => {
if (organization !== null) {
(dataStore as Store).query(q => q.findRecord({type: 'organization', id: organization as string}))
.then((organizationRec: Organization) => {
setOrgName(organizationRec.attributes.name);
})
}
/* eslint-disable-next-line react-hooks/exhaustive-deps */
}, [organization])
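  // Routing cascade: unauthenticated users are sent back to the landing page,
  // Orbit data is fetched once if missing, users without an organization go
  // pick one, and everyone else is forwarded to /admin. The JSX below is
  // intentionally unreachable for now (see the eslint directive).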
if (!isAuthenticated()) return <Redirect to="/" />;
if (!orbitLoaded) {
fetchOrbitData(schema as Schema, dataStore as Store, keyMap as KeyMap, auth);
  }
if (organization === null) return <Redirect to="/organization" />;
return <Redirect to="/admin" />;
/* eslint-disable no-unreachable */
return (
<div className={classes.root}>
<TranscriberBar {...props} search={false} />
<div className={classes.container}>
<Paper className={classes.paper}>
<Typography variant="h4" className={classes.dialogHeader}>
{t.thanksSigningUp}
</Typography>
<Typography variant="h5" className={classes.text}>
{t.StartTranscribingImmediately}
</Typography>
<div className={classes.actions}>
<Button variant="contained" className={classes.button}>
{t.transcriberWeb}
</Button>
<Button variant="contained" className={classes.button}>
{t.transcriberDesktop}
</Button>
</div>
<Typography variant="h5" className={classes.text}>
{t.ConfigureTranscriptionProject}
</Typography>
<div className={classes.actions}>
<Link to="/admin">
<Button variant="contained" className={classes.button}>
{t.transcriberAdmin}
</Button>
</Link>
</div>
</Paper>
</div>
</div>
);
};
const mapStateToProps = (state: IState): IStateProps => ({
t: localStrings(state, {layout: "welcome"}),
orbitLoaded: state.orbit.loaded,
});
const mapDispatchToProps = (dispatch: any): IDispatchProps => ({
...bindActionCreators({
fetchLocalization: action.fetchLocalization,
setLanguage: action.setLanguage,
fetchOrbitData: action.fetchOrbitData,
}, dispatch),
});
const mapRecordsToProps = {
users: (q: QueryBuilder) => q.findRecords('user')
};
export default withStyles(styles, { withTheme: true })(
withData(mapRecordsToProps)(
connect(mapStateToProps, mapDispatchToProps)(Welcome) as any
) as any
) as any;
site-area-consumption-chart.component.ts | import { AfterViewInit, Component, ElementRef, Input, OnInit, ViewChild } from '@angular/core';
import { AbstractControl, FormControl, Validators } from '@angular/forms';
import { TranslateService } from '@ngx-translate/core';
import { Chart, ChartData, ChartDataset, ChartOptions, Color, TooltipItem } from 'chart.js';
import * as moment from 'moment';
import { AppUnitPipe } from 'shared/formatters/app-unit.pipe';
import { DataResultAuthorizationActions } from 'types/Authorization';
import { ConsumptionChartAxis, ConsumptionChartDatasetOrder } from 'types/Chart';
import { CentralServerService } from '../../../../../services/central-server.service';
import { SpinnerService } from '../../../../../services/spinner.service';
import { AppDatePipe } from '../../../../../shared/formatters/app-date.pipe';
import { AppDecimalPipe } from '../../../../../shared/formatters/app-decimal.pipe';
import { AppDurationPipe } from '../../../../../shared/formatters/app-duration.pipe';
import { SiteArea, SiteAreaConsumption, SiteAreaValueTypes } from '../../../../../types/SiteArea';
import { ConsumptionUnit } from '../../../../../types/Transaction';
import { Utils } from '../../../../../utils/Utils';
@Component({
selector: 'app-site-area-chart',
templateUrl: 'site-area-consumption-chart.component.html',
})
export class SiteAreaConsumptionChartComponent implements OnInit, AfterViewInit {
@Input() public siteArea!: SiteArea;
@Input() public siteAreasAuthorizations: DataResultAuthorizationActions;
@ViewChild('primary', { static: true }) public primaryElement!: ElementRef;
@ViewChild('secondary', { static: true }) public secondaryElement!: ElementRef;
@ViewChild('warning', { static: true }) public warningElement!: ElementRef;
@ViewChild('success', { static: true }) public successElement!: ElementRef;
@ViewChild('danger', { static: true }) public dangerElement!: ElementRef;
@ViewChild('chart', { static: true }) public chartElement!: ElementRef;
public siteAreaConsumption!: SiteAreaConsumption;
public selectedUnit = ConsumptionUnit.KILOWATT;
public dateControl!: AbstractControl;
public startDate = moment().startOf('d').toDate();
public endDate = moment().endOf('d').toDate();
private graphCreated = false;
private lineTension = 0;
private data: ChartData = {
labels: [],
datasets: [],
};
private options!: ChartOptions;
private chart!: Chart;
private assetConsumptionsInstantPowerColor!: string;
private assetProductionsInstantPowerColor!: string;
private chargingStationsConsumptionsInstantPowerColor!: string;
private netInstantPowerColor!: string;
private limitColor!: string;
private defaultColor!: string;
private backgroundColor!: string;
private firstLabel: number;
private visibleDatasets = [
ConsumptionChartDatasetOrder.NET_CONSUMPTION_WATTS
];
private gridDisplay = {
[ConsumptionChartAxis.POWER]: true,
[ConsumptionChartAxis.AMPERAGE]: true,
};
// eslint-disable-next-line no-useless-constructor
public constructor(
private spinnerService: SpinnerService,
private centralServerService: CentralServerService,
private translateService: TranslateService,
private datePipe: AppDatePipe,
private durationPipe: AppDurationPipe,
private decimalPipe: AppDecimalPipe,
private unitPipe: AppUnitPipe) {
}
public ngOnInit() {
// Date control
this.dateControl = new FormControl('dateControl',
Validators.compose([
Validators.required,
]));
this.dateControl.setValue(this.startDate);
    if (this.siteAreasAuthorizations.canCreate && this.siteArea.canRead && this.siteArea.canUpdate && this.siteArea.canDelete) {
this.visibleDatasets.push(...[
ConsumptionChartDatasetOrder.LIMIT_WATTS,
ConsumptionChartDatasetOrder.ASSET_CONSUMPTION_WATTS,
ConsumptionChartDatasetOrder.ASSET_PRODUCTION_WATTS,
ConsumptionChartDatasetOrder.CHARGING_STATION_CONSUMPTION_WATTS
]);
}
}
public ngAfterViewInit() {
this.netInstantPowerColor = this.getStyleColor(this.primaryElement.nativeElement);
this.assetProductionsInstantPowerColor = this.getStyleColor(this.successElement.nativeElement);
this.chargingStationsConsumptionsInstantPowerColor = this.getStyleColor(this.secondaryElement.nativeElement);
this.assetConsumptionsInstantPowerColor = this.getStyleColor(this.warningElement.nativeElement);
this.limitColor = this.getStyleColor(this.dangerElement.nativeElement);
this.defaultColor = this.getStyleColor(this.chartElement.nativeElement);
this.backgroundColor = Utils.toRgba('#FFFFFF', 1);
if (this.canDisplayGraph()) {
this.prepareOrUpdateGraph();
} else {
this.refresh();
}
}
public refresh() {
this.spinnerService.show();
// Change Date for testing e.g.:
this.centralServerService.getSiteAreaConsumption(this.siteArea.id, this.startDate, this.endDate)
.subscribe((siteAreaData) => {
this.spinnerService.hide();
this.siteAreaConsumption = siteAreaData;
this.prepareOrUpdateGraph();
}, (error) => {
this.spinnerService.hide();
delete this.siteAreaConsumption;
});
}
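  // Switching between kW and A rebuilds the datasets so that each series is
  // bound to the axis and label matching the newly selected unit.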
public unitChanged(key: ConsumptionUnit) {
this.spinnerService.show();
this.selectedUnit = key;
this.updateVisibleDatasets();
this.createGraphData();
this.prepareOrUpdateGraph();
this.spinnerService.hide();
}
public dateFilterChanged(value: Date) {
if (value) {
this.startDate = moment(value).startOf('d').toDate();
this.endDate = moment(value).endOf('d').toDate();
this.refresh();
}
}
private updateVisibleDatasets(){
this.visibleDatasets = [];
this.data.datasets.forEach(dataset => {
if(!dataset.hidden){
this.visibleDatasets.push(dataset.order);
}
});
}
private getStyleColor(element: Element): string {
const style = getComputedStyle(element);
return style && style.color ? style.color : '';
}
private prepareOrUpdateGraph() {
if (this.canDisplayGraph()) {
if (!this.graphCreated) {
this.graphCreated = true;
this.options = this.createOptions();
this.createGraphData();
this.chart = new Chart(this.chartElement.nativeElement.getContext('2d'), {
type: 'bar',
data: this.data,
options: this.options,
});
}
this.refreshDataSets();
this.chart.update();
}
}
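  // Builds the (initially empty) line datasets for the selected unit; the
  // actual data points are filled in later by refreshDataSets().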
// eslint-disable-next-line complexity
private createGraphData() {
const datasets: ChartDataset[] = [];
// Asset Consumption Instant Amps/Power
datasets.push({
type: 'line',
hidden: this.visibleDatasets.indexOf(ConsumptionChartDatasetOrder.ASSET_CONSUMPTION_AMPS) === -1
&& this.visibleDatasets.indexOf(ConsumptionChartDatasetOrder.ASSET_CONSUMPTION_WATTS) === -1,
data: [],
yAxisID: this.selectedUnit === ConsumptionUnit.AMPERE ? ConsumptionChartAxis.AMPERAGE : ConsumptionChartAxis.POWER,
lineTension: this.lineTension,
...Utils.formatLineColor(this.assetConsumptionsInstantPowerColor),
label: this.translateService.instant((this.selectedUnit === ConsumptionUnit.AMPERE) ?
'organization.graph.asset_consumption_amps' : 'organization.graph.asset_consumption_watts'),
order: (this.selectedUnit === ConsumptionUnit.AMPERE) ?
ConsumptionChartDatasetOrder.ASSET_CONSUMPTION_AMPS :
ConsumptionChartDatasetOrder.ASSET_CONSUMPTION_WATTS,
fill: 'origin',
});
// Charging Stations Consumption Instant Amps/Power
datasets.push({
type: 'line',
hidden: this.visibleDatasets.indexOf(ConsumptionChartDatasetOrder.CHARGING_STATION_CONSUMPTION_WATTS) === -1
&& this.visibleDatasets.indexOf(ConsumptionChartDatasetOrder.CHARGING_STATION_CONSUMPTION_AMPS) === -1,
data: [],
yAxisID: this.selectedUnit === ConsumptionUnit.AMPERE ? ConsumptionChartAxis.AMPERAGE : ConsumptionChartAxis.POWER,
lineTension: this.lineTension,
...Utils.formatLineColor(this.chargingStationsConsumptionsInstantPowerColor),
label: this.translateService.instant((this.selectedUnit === ConsumptionUnit.AMPERE) ?
'organization.graph.charging_station_consumption_amps' : 'organization.graph.charging_station_consumption_watts'),
order: (this.selectedUnit === ConsumptionUnit.AMPERE) ?
ConsumptionChartDatasetOrder.CHARGING_STATION_CONSUMPTION_AMPS :
ConsumptionChartDatasetOrder.CHARGING_STATION_CONSUMPTION_WATTS,
fill: 'origin',
});
// Asset Production Instant Amps/Power
datasets.push({
type: 'line',
hidden: this.visibleDatasets.indexOf(ConsumptionChartDatasetOrder.ASSET_PRODUCTION_WATTS) === -1
&& this.visibleDatasets.indexOf(ConsumptionChartDatasetOrder.ASSET_PRODUCTION_AMPS) === -1,
data: [],
yAxisID: this.selectedUnit === ConsumptionUnit.AMPERE ? ConsumptionChartAxis.AMPERAGE : ConsumptionChartAxis.POWER,
lineTension: this.lineTension,
...Utils.formatLineColor(this.assetProductionsInstantPowerColor),
label: this.translateService.instant((this.selectedUnit === ConsumptionUnit.AMPERE) ?
'organization.graph.asset_production_amps' : 'organization.graph.asset_production_watts'),
order: (this.selectedUnit === ConsumptionUnit.AMPERE) ?
ConsumptionChartDatasetOrder.ASSET_PRODUCTION_AMPS :
ConsumptionChartDatasetOrder.ASSET_PRODUCTION_WATTS,
fill: 'origin',
});
// Net Instant Amps/Power
datasets.push({
type: 'line',
hidden: this.visibleDatasets.indexOf(ConsumptionChartDatasetOrder.NET_CONSUMPTION_AMPS) === -1
&& this.visibleDatasets.indexOf(ConsumptionChartDatasetOrder.NET_CONSUMPTION_WATTS) === -1,
data: [],
yAxisID: this.selectedUnit === ConsumptionUnit.AMPERE ? ConsumptionChartAxis.AMPERAGE : ConsumptionChartAxis.POWER,
lineTension: this.lineTension,
...Utils.formatLineColor(this.netInstantPowerColor),
label: this.translateService.instant((this.selectedUnit === ConsumptionUnit.AMPERE) ?
'organization.graph.net_consumption_amps' : 'organization.graph.net_consumption_watts'),
order: (this.selectedUnit === ConsumptionUnit.AMPERE) ?
ConsumptionChartDatasetOrder.NET_CONSUMPTION_AMPS :
ConsumptionChartDatasetOrder.NET_CONSUMPTION_WATTS,
fill: 'origin',
});
// Limit Amps/Power
datasets.push({
type: 'line',
hidden: this.visibleDatasets.indexOf(ConsumptionChartDatasetOrder.LIMIT_AMPS) === -1
&& this.visibleDatasets.indexOf(ConsumptionChartDatasetOrder.LIMIT_WATTS) === -1,
data: [],
yAxisID: this.selectedUnit === ConsumptionUnit.AMPERE ? ConsumptionChartAxis.AMPERAGE : ConsumptionChartAxis.POWER,
lineTension: this.lineTension,
...Utils.formatLineColor(this.limitColor),
label: this.translateService.instant((this.selectedUnit === ConsumptionUnit.AMPERE) ?
'organization.graph.limit_amps' : 'organization.graph.limit_watts'),
order: (this.selectedUnit === ConsumptionUnit.AMPERE) ?
ConsumptionChartDatasetOrder.LIMIT_AMPS :
ConsumptionChartDatasetOrder.LIMIT_WATTS,
fill: 'origin',
});
// Assign
this.data.labels = [];
this.data.datasets = datasets;
}
private getDataSetByOrder(order: number): number[] | null {
const dataSet = this.data.datasets.find((d) => d.order === order);
return dataSet ? dataSet.data as number[] : null;
}
private canDisplayGraph() {
return this.siteAreaConsumption?.values?.length > 0;
}
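  // Re-populates every dataset (looked up by its ConsumptionChartDatasetOrder)
  // from the consumption samples and rebuilds the time labels.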
private refreshDataSets() {
for (const key of Object.keys(this.data.datasets)) {
this.data.datasets[key].data = [];
}
const labels: number[] = [];
const assetConsumptionsInstantPowerDataSet = this.getDataSetByOrder(ConsumptionChartDatasetOrder.ASSET_CONSUMPTION_WATTS);
const assetConsumptionsInstantAmpsDataSet = this.getDataSetByOrder(ConsumptionChartDatasetOrder.ASSET_CONSUMPTION_AMPS);
const assetProductionsInstantPowerDataSet = this.getDataSetByOrder(ConsumptionChartDatasetOrder.ASSET_PRODUCTION_WATTS);
const assetProductionsInstantAmpsDataSet = this.getDataSetByOrder(ConsumptionChartDatasetOrder.ASSET_PRODUCTION_AMPS);
const chargingStationsInstantPowerDataSet = this.getDataSetByOrder(ConsumptionChartDatasetOrder.CHARGING_STATION_CONSUMPTION_WATTS);
const chargingStationsInstantAmpsDataSet = this.getDataSetByOrder(ConsumptionChartDatasetOrder.CHARGING_STATION_CONSUMPTION_AMPS);
const netConsumptionsInstantPowerDataSet = this.getDataSetByOrder(ConsumptionChartDatasetOrder.NET_CONSUMPTION_WATTS);
const netConsumptionsInstantAmpsDataSet = this.getDataSetByOrder(ConsumptionChartDatasetOrder.NET_CONSUMPTION_AMPS);
const limitWattsDataSet = this.getDataSetByOrder(ConsumptionChartDatasetOrder.LIMIT_WATTS);
const limitAmpsDataSet = this.getDataSetByOrder(ConsumptionChartDatasetOrder.LIMIT_AMPS);
for (const consumption of this.siteAreaConsumption.values) {
const dateTime = new Date(consumption.startedAt);
labels.push(dateTime.getTime());
if (assetConsumptionsInstantPowerDataSet) {
assetConsumptionsInstantPowerDataSet.push(consumption[SiteAreaValueTypes.ASSET_CONSUMPTION_WATTS]);
}
if (assetConsumptionsInstantAmpsDataSet) {
assetConsumptionsInstantAmpsDataSet.push(consumption[SiteAreaValueTypes.ASSET_CONSUMPTION_AMPS]);
}
if (assetProductionsInstantPowerDataSet) {
assetProductionsInstantPowerDataSet.push(consumption[SiteAreaValueTypes.ASSET_PRODUCTION_WATTS]);
}
if (assetProductionsInstantAmpsDataSet) {
assetProductionsInstantAmpsDataSet.push(consumption[SiteAreaValueTypes.ASSET_PRODUCTION_AMPS]);
}
if (chargingStationsInstantPowerDataSet) {
chargingStationsInstantPowerDataSet.push(consumption[SiteAreaValueTypes.CHARGING_STATION_CONSUMPTION_WATTS]);
}
if (chargingStationsInstantAmpsDataSet) {
chargingStationsInstantAmpsDataSet.push(consumption[SiteAreaValueTypes.CHARGING_STATION_CONSUMPTION_AMPS]);
}
if (netConsumptionsInstantPowerDataSet) {
netConsumptionsInstantPowerDataSet.push(consumption[SiteAreaValueTypes.NET_CONSUMPTION_WATTS]);
}
if (netConsumptionsInstantAmpsDataSet) {
netConsumptionsInstantAmpsDataSet.push(consumption[SiteAreaValueTypes.NET_CONSUMPTION_AMPS]);
}
if (limitWattsDataSet) {
limitWattsDataSet.push(consumption.limitWatts);
}
if (limitAmpsDataSet) {
limitAmpsDataSet.push(consumption.limitAmps);
}
}
this.data.labels = labels;
this.firstLabel = labels[0];
}
private createOptions(): ChartOptions {
const options: ChartOptions = {
animation: {
duration: 0,
},
responsive: true,
maintainAspectRatio: false,
plugins: {
legend: {
position: 'bottom',
labels: {
color: this.defaultColor,
},
onHover: (e, legendItem, legend) => {
const status = legend.chart.data.datasets[legendItem.datasetIndex].hidden;
if(!status){
legend.chart.data.datasets.forEach((dataset) => dataset.borderWidth = 1);
legend.chart.data.datasets[legendItem.datasetIndex].borderWidth = 5;
legend.chart.update();
}
},
onLeave: (e, legendItem, legend) => {
legend.chart.data.datasets.forEach((dataset) => dataset.borderWidth = 3);
legend.chart.update();
},
onClick: (e, legendItem, legend) => {
const status = legend.chart.data.datasets[legendItem.datasetIndex].hidden;
legend.chart.data.datasets[legendItem.datasetIndex].hidden = !status;
this.data.datasets[legendItem.datasetIndex].hidden = !status;
legend.chart.update();
}
},
tooltip: {
bodySpacing: 5,
mode: 'index',
position: 'nearest',
intersect: false,
callbacks: {
labelColor: (context) => ({
borderColor: context.dataset.borderColor as Color,
backgroundColor: context.dataset.borderColor as Color,
dash: context.dataset.borderDash,
}),
// eslint-disable-next-line complexity
label: (context) => {
const dataset = context.dataset;
const value = dataset.data[context.dataIndex] as number;
const label = context.dataset.label;
let tooltipLabel = '';
switch (context.dataset.order) {
case ConsumptionChartDatasetOrder.ASSET_CONSUMPTION_WATTS:
tooltipLabel = ' ' + this.decimalPipe.transform(value / 1000, '2.0-0') + 'kW';
break;
case ConsumptionChartDatasetOrder.ASSET_CONSUMPTION_AMPS:
tooltipLabel = ' ' + this.decimalPipe.transform(value, '2.0-0') + 'A';
break;
case ConsumptionChartDatasetOrder.ASSET_PRODUCTION_WATTS:
tooltipLabel = ' ' + this.decimalPipe.transform(value / 1000, '2.0-0') + 'kW';
break;
case ConsumptionChartDatasetOrder.ASSET_PRODUCTION_AMPS:
tooltipLabel = ' ' + this.decimalPipe.transform(value, '2.0-0') + 'A';
break;
case ConsumptionChartDatasetOrder.CHARGING_STATION_CONSUMPTION_WATTS:
tooltipLabel = ' ' + this.decimalPipe.transform(value / 1000, '2.0-0') + 'kW';
break;
case ConsumptionChartDatasetOrder.CHARGING_STATION_CONSUMPTION_AMPS:
tooltipLabel = ' ' + this.decimalPipe.transform(value, '2.0-0') + 'A';
break;
case ConsumptionChartDatasetOrder.NET_CONSUMPTION_WATTS:
tooltipLabel = ' ' + this.decimalPipe.transform(value / 1000, '2.0-0') + 'kW';
break;
case ConsumptionChartDatasetOrder.NET_CONSUMPTION_AMPS:
tooltipLabel = ' ' + this.decimalPipe.transform(value, '2.0-0') + 'A';
break;
case ConsumptionChartDatasetOrder.LIMIT_WATTS:
tooltipLabel = ' ' + this.decimalPipe.transform(value / 1000, '2.0-0') + 'kW';
break;
case ConsumptionChartDatasetOrder.LIMIT_AMPS:
tooltipLabel = ' ' + this.decimalPipe.transform(value, '2.0-0') + 'A';
break;
default:
tooltipLabel = value + '';
}
return `${label}: ${tooltipLabel}`;
},
title: (tooltipItems) => {
const firstDate = new Date(this.firstLabel);
const currentDate = new Date(tooltipItems[0].parsed.x);
return this.datePipe.transform(currentDate) + ' - ' + this.durationPipe.transform((currentDate.getTime() - firstDate.getTime()) / 1000);
},
},
},
},
hover: {
mode: 'index',
intersect: false,
},
scales: {
[ConsumptionChartAxis.X]:{
type: 'time',
time: {
tooltipFormat: moment.localeData().longDateFormat('LT'),
unit: 'minute',
displayFormats: {
second: moment.localeData().longDateFormat('LTS'),
minute: moment.localeData().longDateFormat('LT'),
},
},
grid: {
display: true,
color: 'rgba(0,0,0,0.2)',
},
ticks: {
autoSkip: true,
color: this.defaultColor,
},
},
[ConsumptionChartAxis.POWER]:{
type: 'linear',
position: 'left',
display: 'auto',
ticks: {
callback: (value: number) => this.unitPipe.transform(value, 'W', 'kW', true, 1, 0, 1),
color: this.defaultColor,
},
grid: {
display: true,
drawOnChartArea: this.gridDisplay[ConsumptionChartAxis.POWER],
color: 'rgba(0,0,0,0.2)',
},
title: {
display: true,
text: this.translateService.instant('transactions.consumption') + ' (W)',
}
},
[ConsumptionChartAxis.AMPERAGE]: {
type: 'linear',
position: 'left',
display: 'auto',
grid: {
display: true,
drawOnChartArea: this.gridDisplay[ConsumptionChartAxis.AMPERAGE],
color: 'rgba(0,0,0,0.2)',
},
ticks: {
callback: (value: number) => this.decimalPipe.transform(value, '1.0-1') + ' A',
color: this.defaultColor,
},
title: {
display: true,
text: this.translateService.instant('transactions.consumption') + ' (A)',
}
},
},
};
return options;
}
}
Game.py | import random
from ..CardEnum import *
from copy import deepcopy
class GameResult:
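    """Outcome of a finished game: the winning player and the total turns taken."""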
    def __init__(self, winner, turns):
self.winner = winner
self.turns = turns
class Game:
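    """Drives a full game: each turn a player rolls, money changes hands, and the player may buy a card."""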
def __init__(self, players, engine, deck, logger):
random.seed()
self.winner = None
self.total_turns = 0
self._logger = logger
self._players = players
self._playerCount = len(self._players)
self._engine = engine
self._initialDeck = deepcopy(deck)
self._currentDeck = deck
self._init_game()
    def _log(self, msg):
self._logger.debug(msg)
def _init_game(self):
self._currentPlayer = 0
self._turn = 0
self.winner = None
self.total_turns = 0
init_state = self._engine.initialstate()
for player in self._players:
player.initialstate(deepcopy(init_state))
def run(self):
self._log("Starting a game.")
game_finished = False
while not game_finished:
self._execute_turn()
self._log("")
game_finished = self._engine.winconditionmet(self._players)
self.winner = self._engine.get_winner(self._players)
self.total_turns = self._turn
def _execute_turn(self):
player = self._players[self._currentPlayer]
self._log("\tTurn {0}, Player {1}".format(self._turn, player.name))
# Ask current player for roll.
dicecnt = player.get_number_toroll()
# roll
rollnum = self._roll(dicecnt)
self._log("\t\tPlayer rolls {0} dice, and gets a {1}".format(dicecnt, rollnum))
# use engine to determine earning.
# - Steal first
self._take_money_if_necessary(rollnum)
# - Then Earn
self._award_money_if_necessary(rollnum)
state = player.get_currentstate()
self._log("\t\tAfter money has changed hands, the player now has:{0}".format(state.Money))
# ask current player for purchase
card = player.get_card_topurchase(self._currentDeck.get_availablecards())
# make purchase
if card is not CardEnum.NoCard:
if player.get_currentstate().Money >= CardCosts[card]:
player.deduct_money(CardCosts[card])
self._currentDeck.request_card(card)
player.award_card(card)
self._log("\tThe player purchases {0}".format(card))
# increment current player (increment turn if back to first player)
self._currentPlayer = self._get_next_player()
if self._currentPlayer == 0:
self._turn += 1
@staticmethod
def _roll(dice):
"""Rolls the designated number of 6 sided dice. Returns sum of dice."""
result = 0
i = 0
while i < dice:
result += random.randint(1, 6)
i += 1
return result
def _take_money_if_necessary(self, roll):
"""Iterates thru all other players to determine if money is owed by rolling player."""
currentPlayer = self._players[self._currentPlayer]
nextIdx = self._get_next_player()
        canContinue = True
self._log("")
while canContinue:
# - Determine Cards activated on other players
nextPlayer = self._players[nextIdx]
owed = self._engine.steals_money(nextPlayer.get_currentstate(), roll)
self._log("\t\tPlayer {0} owes {1} {2} money.".format(currentPlayer.name, nextPlayer.name, owed))
            # - Attempt to pay what is owed before moving on to the next player
available = currentPlayer.deduct_money(owed)
if available is None:
self._log("\t\t\t But had no money left...")
canContinue = False
continue
else:
nextPlayer.award_money(available)
if owed != available:
self._log("\t\t\t But could only pay {0}...".format(available))
canContinue = False
continue
self._log("\t\t\t and paid in full.")
nextIdx = self._get_next_player(nextIdx)
if nextIdx == self._currentPlayer:
canContinue = False
def _get_next_player(self, cur_idx=None):
idx = cur_idx
if cur_idx is None:
idx = self._currentPlayer
return (idx + 1) % self._playerCount
def _award_money_if_necessary(self, roll):
"""Iterates thru all players and awards money from bank as applicable."""
# Iterate thru other players first
self._log("")
next_idx = self._get_next_player(self._currentPlayer)
while next_idx != self._currentPlayer:
player = self._players[next_idx]
earned = self._engine.earns_money(player.get_currentstate(), roll, False)
# False because it is not the players turn
self._log("\t\t{0} earned {1} for their blues.".format(player.name, earned))
player.award_money(earned)
next_idx = self._get_next_player(next_idx)
# Award money to current player
player = self._players[self._currentPlayer]
earned = self._engine.earns_money(player.get_currentstate(), roll, True)
self._log("\t\t{0} earned {1} for their blues and greens.".format(player.name, earned))
player.award_money(earned)
def reset(self):
self._currentDeck = deepcopy(self._initialDeck)
self._init_game()
self._randomize_first_player()
def _randomize_first_player(self):
self._currentPlayer = random.randint(0, self._playerCount-1)
def get_players(self):
result = []
for player in self._players:
result.append(player.name)
return result
def get_result(self):
return GameResult(self.winner, self.total_turns)
model.rs | //! Models for leaderboard endpoint.
use serde::Deserialize;
/// The leaderboard.
#[derive(Deserialize, Clone, Debug)]
#[serde(transparent)]
#[non_exhaustive]
pub struct Leaderboard {
/// Players in the leaderboard, sorted ascending
pub players: Vec<Player>,
}
/// Deserialized player.
#[derive(Deserialize, Clone, Debug)]
#[non_exhaustive]
pub struct Player {
pub username: String,
pub platform: String,
pub ubisoft_id: String,
pub uplay_id: Option<String>,
pub avatar_url_146: Option<String>,
pub avatar_url_256: Option<String>,
pub stats: Stats,
pub score: f32,
pub position: u16,
}
/// Deserialized stats.
#[derive(Deserialize, Clone, Debug)]
#[non_exhaustive]
pub struct Stats {
pub level: u16,
pub kd: f32,
pub wl: f32,
}
packages.js | require=(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
module.exports = function blacklist (src) {
var copy = {}, filter = arguments[1]
if (typeof filter === 'string') {
filter = {}
for (var i = 1; i < arguments.length; i++) {
filter[arguments[i]] = true
}
}
for (var key in src) {
// blacklist?
if (filter[key]) continue
copy[key] = src[key]
}
return copy
}
},{}],2:[function(require,module,exports){
'use strict';
/**
* Convert the value given in bytes into bytes, KB, MB, GB or TB.
*
* @param {number} value Value to convert.
*
* @returns {{value: number, unit: string}} The converted value and the second the unit.
*/
function convertValue(value) {
var tbValue = ((1 << 30) * 1024),
gbValue = 1 << 30,
mbValue = 1 << 20,
kbValue = 1 << 10,
absValue = Math.abs(value),
unit = 'B';
if (absValue >= tbValue) {
value = Math.round(value / tbValue * 100) / 100;
unit = 'TB';
} else if (absValue >= gbValue) {
value = Math.round(value / gbValue * 100) / 100;
unit = 'GB';
} else if (absValue >= mbValue) {
value = Math.round(value / mbValue * 100) / 100;
unit = 'MB';
} else if (absValue >= kbValue) {
value = Math.round(value / kbValue * 100) / 100;
unit = 'kB';
}
return {
value: value,
unit: unit
};
}
/**
* @see {@link parser.convert()}
*
* @param {number} value Value to convert
* @param {{
* thousandsSeparator: string|null
* }} [options] See byte parser options.
*
* @return {string|null}
* @api public
*/
module.exports = function (value, options) {
if (typeof value !== 'number') {
return null;
}
var converterResult = convertValue(value);
var convertedValue = converterResult.value;
var thousandsSeparator = (options && options.thousandsSeparator) || '';
var unit = converterResult.unit;
if (thousandsSeparator) {
convertedValue = convertedValue.toString().replace(/\B(?=(\d{3})+(?!\d))/g, thousandsSeparator);
}
return convertedValue + unit;
};
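// Illustrative usage of the formatter above (not part of the original bundle):
// e.g. 1536 -> '1.5kB', 1073741824 -> '1GB'.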
},{}],3:[function(require,module,exports){
'use strict';
/**
* @see {@link parser.parse()}
*
* @param {string} stringValue
* @returns {number|null}
*/
module.exports = function(stringValue) {
// Expect value to be a string
if (typeof stringValue !== 'string') {
return null;
}
// Test if the string passed is valid
var results = stringValue.match(/^((-|\+)?(\d+(?:\.\d+)?)) *(kb|mb|gb|tb)$/i);
var floatValue;
var unit = 'b';
if (!results) {
// Nothing could be extracted from the given string
floatValue = parseInt(stringValue);
unit = 'b'
} else {
// Retrieve the value and the unit
floatValue = parseFloat(results[1]);
unit = results[4].toLowerCase();
}
var map = {
b: 1,
kb: 1 << 10,
mb: 1 << 20,
gb: 1 << 30,
tb: ((1 << 30) * 1024)
};
return map[unit] * floatValue;
};
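// Illustrative usage of the parser above (not part of the original bundle):
// e.g. '1.5kb' -> 1536, '2 MB' -> 2097152, plain '123' -> 123.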
},{}],4:[function(require,module,exports){
'use strict';
var React = require('react');
var Option = React.createClass({
displayName: 'Value',
propTypes: {
label: React.PropTypes.string.isRequired
},
blockEvent: function blockEvent(event) {
event.stopPropagation();
},
render: function render() {
var label = this.props.label;
if (this.props.optionLabelClick) {
label = React.createElement(
'a',
{ className: 'Select-item-label__a',
onMouseDown: this.blockEvent,
onTouchEnd: this.props.onOptionLabelClick,
onClick: this.props.onOptionLabelClick },
label
);
}
return React.createElement(
'div',
{ className: 'Select-item' },
React.createElement(
'span',
{ className: 'Select-item-icon',
onMouseDown: this.blockEvent,
onClick: this.props.onRemove,
onTouchEnd: this.props.onRemove },
'×'
),
React.createElement(
'span',
{ className: 'Select-item-label' },
label
)
);
}
});
module.exports = Option;
},{"react":"react"}],5:[function(require,module,exports){
'use strict';
var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
var React = require('react');
var sizerStyle = { position: 'absolute', visibility: 'hidden', height: 0, width: 0, overflow: 'scroll', whiteSpace: 'nowrap' };
var AutosizeInput = React.createClass({
displayName: 'AutosizeInput',
propTypes: {
value: React.PropTypes.any, // field value
defaultValue: React.PropTypes.any, // default field value
onChange: React.PropTypes.func, // onChange handler: function(newValue) {}
style: React.PropTypes.object, // css styles for the outer element
className: React.PropTypes.string, // className for the outer element
minWidth: React.PropTypes.oneOfType([// minimum width for input element
React.PropTypes.number, React.PropTypes.string]),
inputStyle: React.PropTypes.object, // css styles for the input element
inputClassName: React.PropTypes.string // className for the input element
},
getDefaultProps: function getDefaultProps() {
return {
minWidth: 1
};
},
getInitialState: function getInitialState() {
return {
inputWidth: this.props.minWidth
};
},
componentDidMount: function componentDidMount() {
this.copyInputStyles();
this.updateInputWidth();
},
componentDidUpdate: function componentDidUpdate() {
this.updateInputWidth();
},
copyInputStyles: function copyInputStyles() {
if (!this.isMounted() || !window.getComputedStyle) {
return;
}
var inputStyle = window.getComputedStyle(this.refs.input.getDOMNode());
var widthNode = this.refs.sizer.getDOMNode();
widthNode.style.fontSize = inputStyle.fontSize;
widthNode.style.fontFamily = inputStyle.fontFamily;
if (this.props.placeholder) {
var placeholderNode = this.refs.placeholderSizer.getDOMNode();
placeholderNode.style.fontSize = inputStyle.fontSize;
placeholderNode.style.fontFamily = inputStyle.fontFamily;
}
},
updateInputWidth: function updateInputWidth() {
if (!this.isMounted() || typeof this.refs.sizer.getDOMNode().scrollWidth === 'undefined') {
return;
}
var newInputWidth;
if (this.props.placeholder) {
newInputWidth = Math.max(this.refs.sizer.getDOMNode().scrollWidth, this.refs.placeholderSizer.getDOMNode().scrollWidth) + 2;
} else {
newInputWidth = this.refs.sizer.getDOMNode().scrollWidth + 2;
}
if (newInputWidth < this.props.minWidth) {
newInputWidth = this.props.minWidth;
}
if (newInputWidth !== this.state.inputWidth) {
this.setState({
inputWidth: newInputWidth
});
}
},
getInput: function getInput() {
return this.refs.input;
},
focus: function focus() {
this.refs.input.getDOMNode().focus();
},
select: function select() {
this.refs.input.getDOMNode().select();
},
render: function render() {
var nbspValue = (this.props.value || '').replace(/ /g, ' ');
var wrapperStyle = this.props.style || {};
wrapperStyle.display = 'inline-block';
var inputStyle = this.props.inputStyle || {};
inputStyle.width = this.state.inputWidth;
var placeholder = this.props.placeholder ? React.createElement(
'div',
{ ref: 'placeholderSizer', style: sizerStyle },
this.props.placeholder
) : null;
return React.createElement(
'div',
{ className: this.props.className, style: wrapperStyle },
React.createElement('input', _extends({}, this.props, { ref: 'input', className: this.props.inputClassName, style: inputStyle })),
React.createElement('div', { ref: 'sizer', style: sizerStyle, dangerouslySetInnerHTML: { __html: nbspValue } }),
placeholder
);
}
});
module.exports = AutosizeInput;
},{"react":"react"}],6:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule AutoFocusMixin
* @typechecks static-only
*/
'use strict';
var focusNode = require("./focusNode");
var AutoFocusMixin = {
componentDidMount: function() {
if (this.props.autoFocus) {
focusNode(this.getDOMNode());
}
}
};
module.exports = AutoFocusMixin;
},{"./focusNode":139}],7:[function(require,module,exports){
/**
* Copyright 2013-2015 Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule BeforeInputEventPlugin
* @typechecks static-only
*/
'use strict';
var EventConstants = require("./EventConstants");
var EventPropagators = require("./EventPropagators");
var ExecutionEnvironment = require("./ExecutionEnvironment");
var FallbackCompositionState = require("./FallbackCompositionState");
var SyntheticCompositionEvent = require("./SyntheticCompositionEvent");
var SyntheticInputEvent = require("./SyntheticInputEvent");
var keyOf = require("./keyOf");
var END_KEYCODES = [9, 13, 27, 32]; // Tab, Return, Esc, Space
var START_KEYCODE = 229;
var canUseCompositionEvent = (
ExecutionEnvironment.canUseDOM &&
'CompositionEvent' in window
);
var documentMode = null;
if (ExecutionEnvironment.canUseDOM && 'documentMode' in document) {
documentMode = document.documentMode;
}
// Webkit offers a very useful `textInput` event that can be used to
// directly represent `beforeInput`. The IE `textinput` event is not as
// useful, so we don't use it.
var canUseTextInputEvent = (
ExecutionEnvironment.canUseDOM &&
'TextEvent' in window &&
!documentMode &&
!isPresto()
);
// In IE9+, we have access to composition events, but the data supplied
// by the native compositionend event may be incorrect. Japanese ideographic
// spaces, for instance (\u3000) are not recorded correctly.
var useFallbackCompositionData = (
ExecutionEnvironment.canUseDOM &&
(
(!canUseCompositionEvent || documentMode && documentMode > 8 && documentMode <= 11)
)
);
/**
* Opera <= 12 includes TextEvent in window, but does not fire
* text input events. Rely on keypress instead.
*/
function isPresto() {
var opera = window.opera;
return (
typeof opera === 'object' &&
typeof opera.version === 'function' &&
parseInt(opera.version(), 10) <= 12
);
}
var SPACEBAR_CODE = 32;
var SPACEBAR_CHAR = String.fromCharCode(SPACEBAR_CODE);
var topLevelTypes = EventConstants.topLevelTypes;
// Events and their corresponding property names.
var eventTypes = {
beforeInput: {
phasedRegistrationNames: {
bubbled: keyOf({onBeforeInput: null}),
captured: keyOf({onBeforeInputCapture: null})
},
dependencies: [
topLevelTypes.topCompositionEnd,
topLevelTypes.topKeyPress,
topLevelTypes.topTextInput,
topLevelTypes.topPaste
]
},
compositionEnd: {
phasedRegistrationNames: {
bubbled: keyOf({onCompositionEnd: null}),
captured: keyOf({onCompositionEndCapture: null})
},
dependencies: [
topLevelTypes.topBlur,
topLevelTypes.topCompositionEnd,
topLevelTypes.topKeyDown,
topLevelTypes.topKeyPress,
topLevelTypes.topKeyUp,
topLevelTypes.topMouseDown
]
},
compositionStart: {
phasedRegistrationNames: {
bubbled: keyOf({onCompositionStart: null}),
captured: keyOf({onCompositionStartCapture: null})
},
dependencies: [
topLevelTypes.topBlur,
topLevelTypes.topCompositionStart,
topLevelTypes.topKeyDown,
topLevelTypes.topKeyPress,
topLevelTypes.topKeyUp,
topLevelTypes.topMouseDown
]
},
compositionUpdate: {
phasedRegistrationNames: {
bubbled: keyOf({onCompositionUpdate: null}),
captured: keyOf({onCompositionUpdateCapture: null})
},
dependencies: [
topLevelTypes.topBlur,
topLevelTypes.topCompositionUpdate,
topLevelTypes.topKeyDown,
topLevelTypes.topKeyPress,
topLevelTypes.topKeyUp,
topLevelTypes.topMouseDown
]
}
};
// Track whether we've ever handled a keypress on the space key.
var hasSpaceKeypress = false;
/**
* Return whether a native keypress event is assumed to be a command.
* This is required because Firefox fires `keypress` events for key commands
* (cut, copy, select-all, etc.) even though no character is inserted.
*/
function isKeypressCommand(nativeEvent) {
return (
(nativeEvent.ctrlKey || nativeEvent.altKey || nativeEvent.metaKey) &&
// ctrlKey && altKey is equivalent to AltGr, and is not a command.
!(nativeEvent.ctrlKey && nativeEvent.altKey)
);
}
/**
* Translate native top level events into event types.
*
* @param {string} topLevelType
* @return {object}
*/
function getCompositionEventType(topLevelType) {
switch (topLevelType) {
case topLevelTypes.topCompositionStart:
return eventTypes.compositionStart;
case topLevelTypes.topCompositionEnd:
return eventTypes.compositionEnd;
case topLevelTypes.topCompositionUpdate:
return eventTypes.compositionUpdate;
}
}
/**
* Does our fallback best-guess model think this event signifies that
* composition has begun?
*
* @param {string} topLevelType
* @param {object} nativeEvent
* @return {boolean}
*/
function isFallbackCompositionStart(topLevelType, nativeEvent) {
return (
topLevelType === topLevelTypes.topKeyDown &&
nativeEvent.keyCode === START_KEYCODE
);
}
/**
* Does our fallback mode think that this event is the end of composition?
*
* @param {string} topLevelType
* @param {object} nativeEvent
* @return {boolean}
*/
function isFallbackCompositionEnd(topLevelType, nativeEvent) {
switch (topLevelType) {
case topLevelTypes.topKeyUp:
// Command keys insert or clear IME input.
return (END_KEYCODES.indexOf(nativeEvent.keyCode) !== -1);
case topLevelTypes.topKeyDown:
// Expect IME keyCode on each keydown. If we get any other
// code we must have exited earlier.
return (nativeEvent.keyCode !== START_KEYCODE);
case topLevelTypes.topKeyPress:
case topLevelTypes.topMouseDown:
case topLevelTypes.topBlur:
// Events are not possible without cancelling IME.
return true;
default:
return false;
}
}
/**
* Google Input Tools provides composition data via a CustomEvent,
* with the `data` property populated in the `detail` object. If this
* is available on the event object, use it. If not, this is a plain
* composition event and we have nothing special to extract.
*
* @param {object} nativeEvent
* @return {?string}
*/
function getDataFromCustomEvent(nativeEvent) {
var detail = nativeEvent.detail;
if (typeof detail === 'object' && 'data' in detail) {
return detail.data;
}
return null;
}
// Track the current IME composition fallback object, if any.
var currentComposition = null;
/**
* @param {string} topLevelType Record from `EventConstants`.
* @param {DOMEventTarget} topLevelTarget The listening component root node.
* @param {string} topLevelTargetID ID of `topLevelTarget`.
* @param {object} nativeEvent Native browser event.
* @return {?object} A SyntheticCompositionEvent.
*/
function extractCompositionEvent(
topLevelType,
topLevelTarget,
topLevelTargetID,
nativeEvent
) {
var eventType;
var fallbackData;
if (canUseCompositionEvent) {
eventType = getCompositionEventType(topLevelType);
} else if (!currentComposition) {
if (isFallbackCompositionStart(topLevelType, nativeEvent)) {
eventType = eventTypes.compositionStart;
}
} else if (isFallbackCompositionEnd(topLevelType, nativeEvent)) {
eventType = eventTypes.compositionEnd;
}
if (!eventType) {
return null;
}
if (useFallbackCompositionData) {
// The current composition is stored statically and must not be
// overwritten while composition continues.
if (!currentComposition && eventType === eventTypes.compositionStart) {
currentComposition = FallbackCompositionState.getPooled(topLevelTarget);
} else if (eventType === eventTypes.compositionEnd) {
if (currentComposition) {
fallbackData = currentComposition.getData();
}
}
}
var event = SyntheticCompositionEvent.getPooled(
eventType,
topLevelTargetID,
nativeEvent
);
if (fallbackData) {
// Inject data generated from fallback path into the synthetic event.
// This matches the property of native CompositionEventInterface.
event.data = fallbackData;
} else {
var customData = getDataFromCustomEvent(nativeEvent);
if (customData !== null) {
event.data = customData;
}
}
EventPropagators.accumulateTwoPhaseDispatches(event);
return event;
}
/**
* @param {string} topLevelType Record from `EventConstants`.
* @param {object} nativeEvent Native browser event.
* @return {?string} The string corresponding to this `beforeInput` event.
*/
function getNativeBeforeInputChars(topLevelType, nativeEvent) {
switch (topLevelType) {
case topLevelTypes.topCompositionEnd:
return getDataFromCustomEvent(nativeEvent);
case topLevelTypes.topKeyPress:
/**
* If native `textInput` events are available, our goal is to make
* use of them. However, there is a special case: the spacebar key.
* In Webkit, preventing default on a spacebar `textInput` event
* cancels character insertion, but it *also* causes the browser
* to fall back to its default spacebar behavior of scrolling the
* page.
*
* Tracking at:
* https://code.google.com/p/chromium/issues/detail?id=355103
*
* To avoid this issue, use the keypress event as if no `textInput`
* event is available.
*/
var which = nativeEvent.which;
if (which !== SPACEBAR_CODE) {
return null;
}
hasSpaceKeypress = true;
return SPACEBAR_CHAR;
case topLevelTypes.topTextInput:
// Record the characters to be added to the DOM.
var chars = nativeEvent.data;
// If it's a spacebar character, assume that we have already handled
// it at the keypress level and bail immediately. Android Chrome
// doesn't give us keycodes, so we need to blacklist it.
if (chars === SPACEBAR_CHAR && hasSpaceKeypress) {
return null;
}
return chars;
default:
// For other native event types, do nothing.
return null;
}
}
/**
* For browsers that do not provide the `textInput` event, extract the
* appropriate string to use for SyntheticInputEvent.
*
* @param {string} topLevelType Record from `EventConstants`.
* @param {object} nativeEvent Native browser event.
* @return {?string} The fallback string for this `beforeInput` event.
*/
function getFallbackBeforeInputChars(topLevelType, nativeEvent) {
// If we are currently composing (IME) and using a fallback to do so,
// try to extract the composed characters from the fallback object.
if (currentComposition) {
if (
topLevelType === topLevelTypes.topCompositionEnd ||
isFallbackCompositionEnd(topLevelType, nativeEvent)
) {
var chars = currentComposition.getData();
FallbackCompositionState.release(currentComposition);
currentComposition = null;
return chars;
}
return null;
}
switch (topLevelType) {
case topLevelTypes.topPaste:
// If a paste event occurs after a keypress, throw out the input
// chars. Paste events should not lead to BeforeInput events.
return null;
case topLevelTypes.topKeyPress:
/**
* As of v27, Firefox may fire keypress events even when no character
* will be inserted. A few possibilities:
*
* - `which` is `0`. Arrow keys, Esc key, etc.
*
* - `which` is the pressed key code, but no char is available.
* Ex: 'AltGr + d` in Polish. There is no modified character for
* this key combination and no character is inserted into the
* document, but FF fires the keypress for char code `100` anyway.
* No `input` event will occur.
*
* - `which` is the pressed key code, but a command combination is
* being used. Ex: `Cmd+C`. No character is inserted, and no
* `input` event will occur.
*/
if (nativeEvent.which && !isKeypressCommand(nativeEvent)) {
return String.fromCharCode(nativeEvent.which);
}
return null;
case topLevelTypes.topCompositionEnd:
return useFallbackCompositionData ? null : nativeEvent.data;
default:
return null;
}
}
/**
* Extract a SyntheticInputEvent for `beforeInput`, based on either native
* `textInput` or fallback behavior.
*
* @param {string} topLevelType Record from `EventConstants`.
* @param {DOMEventTarget} topLevelTarget The listening component root node.
* @param {string} topLevelTargetID ID of `topLevelTarget`.
* @param {object} nativeEvent Native browser event.
* @return {?object} A SyntheticInputEvent.
*/
function extractBeforeInputEvent(
topLevelType,
topLevelTarget,
topLevelTargetID,
nativeEvent
) {
var chars;
if (canUseTextInputEvent) {
chars = getNativeBeforeInputChars(topLevelType, nativeEvent);
} else {
chars = getFallbackBeforeInputChars(topLevelType, nativeEvent);
}
// If no characters are being inserted, no BeforeInput event should
// be fired.
if (!chars) {
return null;
}
var event = SyntheticInputEvent.getPooled(
eventTypes.beforeInput,
topLevelTargetID,
nativeEvent
);
event.data = chars;
EventPropagators.accumulateTwoPhaseDispatches(event);
return event;
}
/**
* Create an `onBeforeInput` event to match
* http://www.w3.org/TR/2013/WD-DOM-Level-3-Events-20131105/#events-inputevents.
*
* This event plugin is based on the native `textInput` event
* available in Chrome, Safari, Opera, and IE. This event fires after
* `onKeyPress` and `onCompositionEnd`, but before `onInput`.
*
* `beforeInput` is spec'd but not implemented in any browsers, and
* the `input` event does not provide any useful information about what has
* actually been added, contrary to the spec. Thus, `textInput` is the best
* available event to identify the characters that have actually been inserted
* into the target node.
*
* This plugin is also responsible for emitting `composition` events, thus
* allowing us to share composition fallback code for both `beforeInput` and
* `composition` event types.
*/
var BeforeInputEventPlugin = {
eventTypes: eventTypes,
/**
* @param {string} topLevelType Record from `EventConstants`.
* @param {DOMEventTarget} topLevelTarget The listening component root node.
* @param {string} topLevelTargetID ID of `topLevelTarget`.
* @param {object} nativeEvent Native browser event.
* @return {*} An accumulation of synthetic events.
* @see {EventPluginHub.extractEvents}
*/
extractEvents: function(
topLevelType,
topLevelTarget,
topLevelTargetID,
nativeEvent
) {
return [
extractCompositionEvent(
topLevelType,
topLevelTarget,
topLevelTargetID,
nativeEvent
),
extractBeforeInputEvent(
topLevelType,
topLevelTarget,
topLevelTargetID,
nativeEvent
)
];
}
};
module.exports = BeforeInputEventPlugin;
},{"./EventConstants":20,"./EventPropagators":25,"./ExecutionEnvironment":26,"./FallbackCompositionState":27,"./SyntheticCompositionEvent":111,"./SyntheticInputEvent":115,"./keyOf":162}],8:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule CSSCore
* @typechecks
*/
var invariant = require("./invariant");
/**
* The CSSCore module specifies the API (and implements most of the methods)
 * that should be used when dealing with the display of elements (via their
 * CSS classes and visibility on screen). It is an API focused on mutating the
 * display and not reading it, as no logical state should be encoded in the
 * display of elements.
*/
var CSSCore = {
/**
* Adds the class passed in to the element if it doesn't already have it.
*
* @param {DOMElement} element the element to set the class on
* @param {string} className the CSS className
* @return {DOMElement} the element passed in
*/
addClass: function(element, className) {
("production" !== "production" ? invariant(
!/\s/.test(className),
'CSSCore.addClass takes only a single class name. "%s" contains ' +
'multiple classes.', className
) : invariant(!/\s/.test(className)));
if (className) {
if (element.classList) {
element.classList.add(className);
} else if (!CSSCore.hasClass(element, className)) {
element.className = element.className + ' ' + className;
}
}
return element;
},
/**
* Removes the class passed in from the element
*
* @param {DOMElement} element the element to set the class on
* @param {string} className the CSS className
* @return {DOMElement} the element passed in
*/
removeClass: function(element, className) {
("production" !== "production" ? invariant(
!/\s/.test(className),
'CSSCore.removeClass takes only a single class name. "%s" contains ' +
'multiple classes.', className
) : invariant(!/\s/.test(className)));
if (className) {
if (element.classList) {
element.classList.remove(className);
} else if (CSSCore.hasClass(element, className)) {
element.className = element.className
.replace(new RegExp('(^|\\s)' + className + '(?:\\s|$)', 'g'), '$1')
.replace(/\s+/g, ' ') // multiple spaces to one
.replace(/^\s*|\s*$/g, ''); // trim the ends
}
}
return element;
},
/**
* Helper to add or remove a class from an element based on a condition.
*
* @param {DOMElement} element the element to set the class on
* @param {string} className the CSS className
   * @param {*} bool condition that determines whether to add or remove the class
* @return {DOMElement} the element passed in
*/
conditionClass: function(element, className, bool) {
return (bool ? CSSCore.addClass : CSSCore.removeClass)(element, className);
},
/**
* Tests whether the element has the class specified.
*
   * @param {DOMNode|DOMWindow} element the element to check the class on
* @param {string} className the CSS className
* @return {boolean} true if the element has the class, false if not
*/
hasClass: function(element, className) {
("production" !== "production" ? invariant(
!/\s/.test(className),
      'CSSCore.hasClass takes only a single class name.'
) : invariant(!/\s/.test(className)));
if (element.classList) {
return !!className && element.classList.contains(className);
}
return (' ' + element.className + ' ').indexOf(' ' + className + ' ') > -1;
}
};
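// Illustrative usage (a sketch, not part of the bundle; assumes a browser DOM):
//
//   var el = document.createElement('div');
//   CSSCore.addClass(el, 'active');              // el.className === 'active'
//   CSSCore.hasClass(el, 'active');              // true
//   CSSCore.conditionClass(el, 'hidden', false); // removes 'hidden' (no-op here)
//   CSSCore.removeClass(el, 'active');           // el.className === ''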
module.exports = CSSCore;
},{"./invariant":155}],9:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule CSSProperty
*/
'use strict';
/**
* CSS properties which accept numbers but are not in units of "px".
*/
var isUnitlessNumber = {
boxFlex: true,
boxFlexGroup: true,
columnCount: true,
flex: true,
flexGrow: true,
flexPositive: true,
flexShrink: true,
flexNegative: true,
fontWeight: true,
lineClamp: true,
lineHeight: true,
opacity: true,
order: true,
orphans: true,
widows: true,
zIndex: true,
zoom: true,
// SVG-related properties
fillOpacity: true,
strokeDashoffset: true,
strokeOpacity: true,
strokeWidth: true
};
/**
 * @param {string} prefix vendor-specific prefix, e.g. Webkit
 * @param {string} key style name, e.g. transitionDuration
 * @return {string} style name prefixed with `prefix`, properly camelCased, e.g.
* WebkitTransitionDuration
*/
function prefixKey(prefix, key) {
return prefix + key.charAt(0).toUpperCase() + key.substring(1);
}
/**
* Support style names that may come passed in prefixed by adding permutations
* of vendor prefixes.
*/
var prefixes = ['Webkit', 'ms', 'Moz', 'O'];
// Using Object.keys here, or else the vanilla for-in loop makes IE8 go into an
// infinite loop, because it iterates over the newly added props too.
Object.keys(isUnitlessNumber).forEach(function(prop) {
prefixes.forEach(function(prefix) {
isUnitlessNumber[prefixKey(prefix, prop)] = isUnitlessNumber[prop];
});
});
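// For example, the `lineHeight` entry above also produces `WebkitLineHeight`,
// `msLineHeight`, `MozLineHeight`, and `OLineHeight` entries.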
/**
 * Most style properties can be unset by doing .style[prop] = '', but IE8
 * doesn't like doing that with shorthand properties. For the properties that
 * IE8 breaks on, which are listed here, we instead unset each of the
 * individual properties. See http://bugs.jquery.com/ticket/12385.
 * The 4-value 'clock' properties like margin, padding, and border-width seem
 * to behave without any problems. Curiously, list-style works too, without
 * any special prodding.
*/
var shorthandPropertyExpansions = {
background: {
backgroundImage: true,
backgroundPosition: true,
backgroundRepeat: true,
backgroundColor: true
},
border: {
borderWidth: true,
borderStyle: true,
borderColor: true
},
borderBottom: {
borderBottomWidth: true,
borderBottomStyle: true,
borderBottomColor: true
},
borderLeft: {
borderLeftWidth: true,
borderLeftStyle: true,
borderLeftColor: true
},
borderRight: {
borderRightWidth: true,
borderRightStyle: true,
borderRightColor: true
},
borderTop: {
borderTopWidth: true,
borderTopStyle: true,
borderTopColor: true
},
font: {
fontStyle: true,
fontVariant: true,
fontWeight: true,
fontSize: true,
lineHeight: true,
fontFamily: true
}
};
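// Illustrative effect: under this scheme, unsetting `border` on an IE8 node
// clears borderWidth, borderStyle, and borderColor individually rather than
// assigning `style.border = ''` (see setValueForStyles in
// CSSPropertyOperations).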
var CSSProperty = {
isUnitlessNumber: isUnitlessNumber,
shorthandPropertyExpansions: shorthandPropertyExpansions
};
module.exports = CSSProperty;
},{}],10:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule CSSPropertyOperations
* @typechecks static-only
*/
'use strict';
var CSSProperty = require("./CSSProperty");
var ExecutionEnvironment = require("./ExecutionEnvironment");
var camelizeStyleName = require("./camelizeStyleName");
var dangerousStyleValue = require("./dangerousStyleValue");
var hyphenateStyleName = require("./hyphenateStyleName");
var memoizeStringOnly = require("./memoizeStringOnly");
var warning = require("./warning");
var processStyleName = memoizeStringOnly(function(styleName) {
return hyphenateStyleName(styleName);
});
var styleFloatAccessor = 'cssFloat';
if (ExecutionEnvironment.canUseDOM) {
// IE8 only supports accessing cssFloat (standard) as styleFloat
if (document.documentElement.style.cssFloat === undefined) {
styleFloatAccessor = 'styleFloat';
}
}
if ("production" !== "production") {
// 'msTransform' is correct, but the other prefixes should be capitalized
var badVendoredStyleNamePattern = /^(?:webkit|moz|o)[A-Z]/;
// style values shouldn't contain a semicolon
var badStyleValueWithSemicolonPattern = /;\s*$/;
var warnedStyleNames = {};
var warnedStyleValues = {};
var warnHyphenatedStyleName = function(name) {
if (warnedStyleNames.hasOwnProperty(name) && warnedStyleNames[name]) {
return;
}
warnedStyleNames[name] = true;
("production" !== "production" ? warning(
false,
'Unsupported style property %s. Did you mean %s?',
name,
camelizeStyleName(name)
) : null);
};
var warnBadVendoredStyleName = function(name) {
if (warnedStyleNames.hasOwnProperty(name) && warnedStyleNames[name]) {
return;
}
warnedStyleNames[name] = true;
("production" !== "production" ? warning(
false,
'Unsupported vendor-prefixed style property %s. Did you mean %s?',
name,
name.charAt(0).toUpperCase() + name.slice(1)
) : null);
};
var warnStyleValueWithSemicolon = function(name, value) {
if (warnedStyleValues.hasOwnProperty(value) && warnedStyleValues[value]) {
return;
}
warnedStyleValues[value] = true;
("production" !== "production" ? warning(
false,
'Style property values shouldn\'t contain a semicolon. ' +
'Try "%s: %s" instead.',
name,
value.replace(badStyleValueWithSemicolonPattern, '')
) : null);
};
/**
* @param {string} name
* @param {*} value
*/
var warnValidStyle = function(name, value) {
if (name.indexOf('-') > -1) {
warnHyphenatedStyleName(name);
} else if (badVendoredStyleNamePattern.test(name)) {
warnBadVendoredStyleName(name);
} else if (badStyleValueWithSemicolonPattern.test(value)) {
warnStyleValueWithSemicolon(name, value);
}
};
}
/**
* Operations for dealing with CSS properties.
*/
var CSSPropertyOperations = {
/**
* Serializes a mapping of style properties for use as inline styles:
*
* > createMarkupForStyles({width: '200px', height: 0})
* "width:200px;height:0;"
*
* Undefined values are ignored so that declarative programming is easier.
* The result should be HTML-escaped before insertion into the DOM.
*
* @param {object} styles
* @return {?string}
*/
createMarkupForStyles: function(styles) {
var serialized = '';
for (var styleName in styles) {
if (!styles.hasOwnProperty(styleName)) {
continue;
}
var styleValue = styles[styleName];
if ("production" !== "production") {
warnValidStyle(styleName, styleValue);
}
if (styleValue != null) {
serialized += processStyleName(styleName) + ':';
serialized += dangerousStyleValue(styleName, styleValue) + ';';
}
}
return serialized || null;
},
/**
* Sets the value for multiple styles on a node. If a value is specified as
* '' (empty string), the corresponding style property will be unset.
*
* @param {DOMElement} node
* @param {object} styles
*/
setValueForStyles: function(node, styles) {
var style = node.style;
for (var styleName in styles) {
if (!styles.hasOwnProperty(styleName)) {
continue;
}
if ("production" !== "production") {
warnValidStyle(styleName, styles[styleName]);
}
var styleValue = dangerousStyleValue(styleName, styles[styleName]);
if (styleName === 'float') {
styleName = styleFloatAccessor;
}
if (styleValue) {
style[styleName] = styleValue;
} else {
var expansion = CSSProperty.shorthandPropertyExpansions[styleName];
if (expansion) {
// Shorthand property that IE8 won't like unsetting, so unset each
// component to placate it
for (var individualStyleName in expansion) {
style[individualStyleName] = '';
}
} else {
style[styleName] = '';
}
}
}
}
};
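// Illustrative results (a sketch, not part of the bundle):
//
//   > CSSPropertyOperations.createMarkupForStyles({lineHeight: 1.5, width: 200})
//   "line-height:1.5;width:200px;"  // lineHeight is unitless; width gets 'px'
//
//   // setValueForStyles(node, {float: 'left', color: ''}) assigns
//   // node.style.cssFloat ('styleFloat' in IE8) and unsets color.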
module.exports = CSSPropertyOperations;
},{"./CSSProperty":9,"./ExecutionEnvironment":26,"./camelizeStyleName":126,"./dangerousStyleValue":133,"./hyphenateStyleName":153,"./memoizeStringOnly":164,"./warning":176}],11:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule CallbackQueue
*/
'use strict';
var PooledClass = require("./PooledClass");
var assign = require("./Object.assign");
var invariant = require("./invariant");
/**
* A specialized pseudo-event module to help keep track of components waiting to
* be notified when their DOM representations are available for use.
*
* This implements `PooledClass`, so you should never need to instantiate this.
* Instead, use `CallbackQueue.getPooled()`.
*
* @class ReactMountReady
* @implements PooledClass
* @internal
*/
function CallbackQueue() {
this._callbacks = null;
this._contexts = null;
}
assign(CallbackQueue.prototype, {
/**
* Enqueues a callback to be invoked when `notifyAll` is invoked.
*
* @param {function} callback Invoked when `notifyAll` is invoked.
* @param {?object} context Context to call `callback` with.
* @internal
*/
enqueue: function(callback, context) {
this._callbacks = this._callbacks || [];
this._contexts = this._contexts || [];
this._callbacks.push(callback);
this._contexts.push(context);
},
/**
* Invokes all enqueued callbacks and clears the queue. This is invoked after
* the DOM representation of a component has been created or updated.
*
* @internal
*/
notifyAll: function() {
var callbacks = this._callbacks;
var contexts = this._contexts;
if (callbacks) {
("production" !== "production" ? invariant(
callbacks.length === contexts.length,
'Mismatched list of contexts in callback queue'
) : invariant(callbacks.length === contexts.length));
this._callbacks = null;
this._contexts = null;
for (var i = 0, l = callbacks.length; i < l; i++) {
callbacks[i].call(contexts[i]);
}
callbacks.length = 0;
contexts.length = 0;
}
},
/**
* Resets the internal queue.
*
* @internal
*/
reset: function() {
this._callbacks = null;
this._contexts = null;
},
/**
* `PooledClass` looks for this.
*/
destructor: function() {
this.reset();
}
});
PooledClass.addPoolingTo(CallbackQueue);
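// Illustrative usage (a sketch, not part of the bundle):
//
//   var queue = CallbackQueue.getPooled();
//   queue.enqueue(function() { console.log(this.id); }, {id: 1});
//   queue.notifyAll();             // invokes the callback with {id: 1}
//   CallbackQueue.release(queue);  // return the instance to the pool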
module.exports = CallbackQueue;
},{"./Object.assign":33,"./PooledClass":34,"./invariant":155}],12:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ChangeEventPlugin
*/
'use strict';
var EventConstants = require("./EventConstants");
var EventPluginHub = require("./EventPluginHub");
var EventPropagators = require("./EventPropagators");
var ExecutionEnvironment = require("./ExecutionEnvironment");
var ReactUpdates = require("./ReactUpdates");
var SyntheticEvent = require("./SyntheticEvent");
var isEventSupported = require("./isEventSupported");
var isTextInputElement = require("./isTextInputElement");
var keyOf = require("./keyOf");
var topLevelTypes = EventConstants.topLevelTypes;
var eventTypes = {
change: {
phasedRegistrationNames: {
bubbled: keyOf({onChange: null}),
captured: keyOf({onChangeCapture: null})
},
dependencies: [
topLevelTypes.topBlur,
topLevelTypes.topChange,
topLevelTypes.topClick,
topLevelTypes.topFocus,
topLevelTypes.topInput,
topLevelTypes.topKeyDown,
topLevelTypes.topKeyUp,
topLevelTypes.topSelectionChange
]
}
};
/**
* For IE shims
*/
var activeElement = null;
var activeElementID = null;
var activeElementValue = null;
var activeElementValueProp = null;
/**
* SECTION: handle `change` event
*/
function shouldUseChangeEvent(elem) {
return (
elem.nodeName === 'SELECT' ||
(elem.nodeName === 'INPUT' && elem.type === 'file')
);
}
var doesChangeEventBubble = false;
if (ExecutionEnvironment.canUseDOM) {
// See `handleChange` comment below
doesChangeEventBubble = isEventSupported('change') && (
(!('documentMode' in document) || document.documentMode > 8)
);
}
function manualDispatchChangeEvent(nativeEvent) {
var event = SyntheticEvent.getPooled(
eventTypes.change,
activeElementID,
nativeEvent
);
EventPropagators.accumulateTwoPhaseDispatches(event);
// If change and propertychange bubbled, we'd just bind to it like all the
// other events and have it go through ReactBrowserEventEmitter. Since it
// doesn't, we manually listen for the events and so we have to enqueue and
// process the abstract event manually.
//
// Batching is necessary here in order to ensure that all event handlers run
// before the next rerender (including event handlers attached to ancestor
// elements instead of directly on the input). Without this, controlled
// components don't work properly in conjunction with event bubbling because
// the component is rerendered and the value reverted before all the event
// handlers can run. See https://github.com/facebook/react/issues/708.
ReactUpdates.batchedUpdates(runEventInBatch, event);
}
function runEventInBatch(event) {
EventPluginHub.enqueueEvents(event);
EventPluginHub.processEventQueue();
}
function startWatchingForChangeEventIE8(target, targetID) {
activeElement = target;
activeElementID = targetID;
activeElement.attachEvent('onchange', manualDispatchChangeEvent);
}
function stopWatchingForChangeEventIE8() {
if (!activeElement) {
return;
}
activeElement.detachEvent('onchange', manualDispatchChangeEvent);
activeElement = null;
activeElementID = null;
}
function getTargetIDForChangeEvent(
topLevelType,
topLevelTarget,
topLevelTargetID) {
if (topLevelType === topLevelTypes.topChange) {
return topLevelTargetID;
}
}
function handleEventsForChangeEventIE8(
topLevelType,
topLevelTarget,
topLevelTargetID) {
if (topLevelType === topLevelTypes.topFocus) {
// stopWatching() should be a noop here but we call it just in case we
// missed a blur event somehow.
stopWatchingForChangeEventIE8();
startWatchingForChangeEventIE8(topLevelTarget, topLevelTargetID);
} else if (topLevelType === topLevelTypes.topBlur) {
stopWatchingForChangeEventIE8();
}
}
/**
* SECTION: handle `input` event
*/
var isInputEventSupported = false;
if (ExecutionEnvironment.canUseDOM) {
// IE9 claims to support the input event but fails to trigger it when
// deleting text, so we ignore its input events
isInputEventSupported = isEventSupported('input') && (
(!('documentMode' in document) || document.documentMode > 9)
);
}
/**
* (For old IE.) Replacement getter/setter for the `value` property that gets
* set on the active element.
*/
var newValueProp = {
get: function() {
return activeElementValueProp.get.call(this);
},
set: function(val) {
// Cast to a string so we can do equality checks.
activeElementValue = '' + val;
activeElementValueProp.set.call(this, val);
}
};
/**
* (For old IE.) Starts tracking propertychange events on the passed-in element
 * and overrides the value property so that we can distinguish user events from
* value changes in JS.
*/
function startWatchingForValueChange(target, targetID) {
activeElement = target;
activeElementID = targetID;
activeElementValue = target.value;
activeElementValueProp = Object.getOwnPropertyDescriptor(
target.constructor.prototype,
'value'
);
Object.defineProperty(activeElement, 'value', newValueProp);
activeElement.attachEvent('onpropertychange', handlePropertyChange);
}
/**
* (For old IE.) Removes the event listeners from the currently-tracked element,
 * if one exists.
*/
function stopWatchingForValueChange() {
if (!activeElement) {
return;
}
// delete restores the original property definition
delete activeElement.value;
activeElement.detachEvent('onpropertychange', handlePropertyChange);
activeElement = null;
activeElementID = null;
activeElementValue = null;
activeElementValueProp = null;
}
/**
* (For old IE.) Handles a propertychange event, sending a `change` event if
* the value of the active element has changed.
*/
function handlePropertyChange(nativeEvent) {
if (nativeEvent.propertyName !== 'value') {
return;
}
var value = nativeEvent.srcElement.value;
if (value === activeElementValue) {
return;
}
activeElementValue = value;
manualDispatchChangeEvent(nativeEvent);
}
/**
* If a `change` event should be fired, returns the target's ID.
*/
function getTargetIDForInputEvent(
topLevelType,
topLevelTarget,
topLevelTargetID) {
if (topLevelType === topLevelTypes.topInput) {
// In modern browsers (i.e., not IE8 or IE9), the input event is exactly
// what we want so fall through here and trigger an abstract event
return topLevelTargetID;
}
}
// For IE8 and IE9.
function handleEventsForInputEventIE(
topLevelType,
topLevelTarget,
topLevelTargetID) {
if (topLevelType === topLevelTypes.topFocus) {
    // In IE8, we can capture almost all .value changes by adding a
    // propertychange handler and looking for events with a propertyName
    // equal to 'value'.
    // In IE9, propertychange fires for most input events but is buggy and
    // doesn't fire when text is deleted; conveniently, selectionchange
    // appears to fire in all of the remaining cases, so we catch those and
    // forward the event if the value has changed.
    // In either case, we don't want to call the event handler if the value
    // is changed from JS, so we redefine a setter for `.value` that updates
    // our activeElementValue variable, allowing us to ignore those changes.
//
// stopWatching() should be a noop here but we call it just in case we
// missed a blur event somehow.
stopWatchingForValueChange();
startWatchingForValueChange(topLevelTarget, topLevelTargetID);
} else if (topLevelType === topLevelTypes.topBlur) {
stopWatchingForValueChange();
}
}
// For IE8 and IE9.
function getTargetIDForInputEventIE(
topLevelType,
topLevelTarget,
topLevelTargetID) {
if (topLevelType === topLevelTypes.topSelectionChange ||
topLevelType === topLevelTypes.topKeyUp ||
topLevelType === topLevelTypes.topKeyDown) {
    // On the selectionchange event, the target is just document, which isn't
    // helpful for us, so we check activeElement instead.
//
// 99% of the time, keydown and keyup aren't necessary. IE8 fails to fire
// propertychange on the first input event after setting `value` from a
// script and fires only keydown, keypress, keyup. Catching keyup usually
// gets it and catching keydown lets us fire an event for the first
    // keystroke if the user does a key repeat (it'll be a little delayed: right
// before the second keystroke). Other input methods (e.g., paste) seem to
// fire selectionchange normally.
if (activeElement && activeElement.value !== activeElementValue) {
activeElementValue = activeElement.value;
return activeElementID;
}
}
}
/**
* SECTION: handle `click` event
*/
function shouldUseClickEvent(elem) {
// Use the `click` event to detect changes to checkbox and radio inputs.
// This approach works across all browsers, whereas `change` does not fire
// until `blur` in IE8.
return (
elem.nodeName === 'INPUT' &&
(elem.type === 'checkbox' || elem.type === 'radio')
);
}
function getTargetIDForClickEvent(
topLevelType,
topLevelTarget,
topLevelTargetID) {
if (topLevelType === topLevelTypes.topClick) {
return topLevelTargetID;
}
}
/**
* This plugin creates an `onChange` event that normalizes change events
* across form elements. This event fires at a time when it's possible to
* change the element's value without seeing a flicker.
*
* Supported elements are:
* - input (see `isTextInputElement`)
* - textarea
* - select
*/
var ChangeEventPlugin = {
eventTypes: eventTypes,
/**
* @param {string} topLevelType Record from `EventConstants`.
* @param {DOMEventTarget} topLevelTarget The listening component root node.
* @param {string} topLevelTargetID ID of `topLevelTarget`.
* @param {object} nativeEvent Native browser event.
* @return {*} An accumulation of synthetic events.
* @see {EventPluginHub.extractEvents}
*/
extractEvents: function(
topLevelType,
topLevelTarget,
topLevelTargetID,
nativeEvent) {
var getTargetIDFunc, handleEventFunc;
if (shouldUseChangeEvent(topLevelTarget)) {
if (doesChangeEventBubble) {
getTargetIDFunc = getTargetIDForChangeEvent;
} else {
handleEventFunc = handleEventsForChangeEventIE8;
}
} else if (isTextInputElement(topLevelTarget)) {
if (isInputEventSupported) {
getTargetIDFunc = getTargetIDForInputEvent;
} else {
getTargetIDFunc = getTargetIDForInputEventIE;
handleEventFunc = handleEventsForInputEventIE;
}
} else if (shouldUseClickEvent(topLevelTarget)) {
getTargetIDFunc = getTargetIDForClickEvent;
}
if (getTargetIDFunc) {
var targetID = getTargetIDFunc(
topLevelType,
topLevelTarget,
topLevelTargetID
);
if (targetID) {
var event = SyntheticEvent.getPooled(
eventTypes.change,
targetID,
nativeEvent
);
EventPropagators.accumulateTwoPhaseDispatches(event);
return event;
}
}
if (handleEventFunc) {
handleEventFunc(
topLevelType,
topLevelTarget,
topLevelTargetID
);
}
}
};
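// Illustrative component-level usage this plugin enables (hypothetical):
//
//   React.createElement('input', {
//     value: text,
//     onChange: function(e) { /* fires per keystroke, even in IE8/9 */ }
//   });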
module.exports = ChangeEventPlugin;
},{"./EventConstants":20,"./EventPluginHub":22,"./EventPropagators":25,"./ExecutionEnvironment":26,"./ReactUpdates":104,"./SyntheticEvent":113,"./isEventSupported":156,"./isTextInputElement":158,"./keyOf":162}],13:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ClientReactRootIndex
* @typechecks
*/
'use strict';
var nextReactRootIndex = 0;
var ClientReactRootIndex = {
createReactRootIndex: function() {
return nextReactRootIndex++;
}
};
module.exports = ClientReactRootIndex;
},{}],14:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule DOMChildrenOperations
* @typechecks static-only
*/
'use strict';
var Danger = require("./Danger");
var ReactMultiChildUpdateTypes = require("./ReactMultiChildUpdateTypes");
var setTextContent = require("./setTextContent");
var invariant = require("./invariant");
/**
* Inserts `childNode` as a child of `parentNode` at the `index`.
*
* @param {DOMElement} parentNode Parent node in which to insert.
* @param {DOMElement} childNode Child node to insert.
* @param {number} index Index at which to insert the child.
* @internal
*/
function insertChildAt(parentNode, childNode, index) {
// By exploiting arrays returning `undefined` for an undefined index, we can
// rely exclusively on `insertBefore(node, null)` instead of also using
// `appendChild(node)`. However, using `undefined` is not allowed by all
// browsers so we must replace it with `null`.
parentNode.insertBefore(
childNode,
parentNode.childNodes[index] || null
);
}
/**
* Operations for updating with DOM children.
*/
var DOMChildrenOperations = {
dangerouslyReplaceNodeWithMarkup: Danger.dangerouslyReplaceNodeWithMarkup,
updateTextContent: setTextContent,
/**
* Updates a component's children by processing a series of updates. The
* update configurations are each expected to have a `parentNode` property.
*
* @param {array<object>} updates List of update configurations.
* @param {array<string>} markupList List of markup strings.
* @internal
*/
processUpdates: function(updates, markupList) {
var update;
// Mapping from parent IDs to initial child orderings.
var initialChildren = null;
// List of children that will be moved or removed.
var updatedChildren = null;
for (var i = 0; i < updates.length; i++) {
update = updates[i];
if (update.type === ReactMultiChildUpdateTypes.MOVE_EXISTING ||
update.type === ReactMultiChildUpdateTypes.REMOVE_NODE) {
var updatedIndex = update.fromIndex;
var updatedChild = update.parentNode.childNodes[updatedIndex];
var parentID = update.parentID;
("production" !== "production" ? invariant(
updatedChild,
'processUpdates(): Unable to find child %s of element. This ' +
'probably means the DOM was unexpectedly mutated (e.g., by the ' +
'browser), usually due to forgetting a <tbody> when using tables, ' +
'nesting tags like <form>, <p>, or <a>, or using non-SVG elements ' +
'in an <svg> parent. Try inspecting the child nodes of the element ' +
'with React ID `%s`.',
updatedIndex,
parentID
) : invariant(updatedChild));
initialChildren = initialChildren || {};
initialChildren[parentID] = initialChildren[parentID] || [];
initialChildren[parentID][updatedIndex] = updatedChild;
updatedChildren = updatedChildren || [];
updatedChildren.push(updatedChild);
}
}
var renderedMarkup = Danger.dangerouslyRenderMarkup(markupList);
// Remove updated children first so that `toIndex` is consistent.
if (updatedChildren) {
for (var j = 0; j < updatedChildren.length; j++) {
updatedChildren[j].parentNode.removeChild(updatedChildren[j]);
}
}
for (var k = 0; k < updates.length; k++) {
update = updates[k];
switch (update.type) {
case ReactMultiChildUpdateTypes.INSERT_MARKUP:
insertChildAt(
update.parentNode,
renderedMarkup[update.markupIndex],
update.toIndex
);
break;
case ReactMultiChildUpdateTypes.MOVE_EXISTING:
insertChildAt(
update.parentNode,
initialChildren[update.parentID][update.fromIndex],
update.toIndex
);
break;
case ReactMultiChildUpdateTypes.TEXT_CONTENT:
setTextContent(
update.parentNode,
update.textContent
);
break;
case ReactMultiChildUpdateTypes.REMOVE_NODE:
// Already removed by the for-loop above.
break;
}
}
}
};
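// Illustrative update (a sketch; `parent` is a hypothetical node with three
// children [a, b, c]):
//
//   DOMChildrenOperations.processUpdates([{
//     parentID: '.0',
//     parentNode: parent,
//     type: ReactMultiChildUpdateTypes.MOVE_EXISTING,
//     fromIndex: 0,
//     toIndex: 2
//   }], []);
//   // moves the first child to the end: [a, b, c] -> [b, c, a]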
module.exports = DOMChildrenOperations;
},{"./Danger":17,"./ReactMultiChildUpdateTypes":83,"./invariant":155,"./setTextContent":170}],15:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule DOMProperty
* @typechecks static-only
*/
/*jslint bitwise: true */
'use strict';
var invariant = require("./invariant");
function checkMask(value, bitmask) {
return (value & bitmask) === bitmask;
}
var DOMPropertyInjection = {
/**
* Mapping from normalized, camelcased property names to a configuration that
* specifies how the associated DOM property should be accessed or rendered.
*/
MUST_USE_ATTRIBUTE: 0x1,
MUST_USE_PROPERTY: 0x2,
HAS_SIDE_EFFECTS: 0x4,
HAS_BOOLEAN_VALUE: 0x8,
HAS_NUMERIC_VALUE: 0x10,
HAS_POSITIVE_NUMERIC_VALUE: 0x20 | 0x10,
HAS_OVERLOADED_BOOLEAN_VALUE: 0x40,
/**
* Inject some specialized knowledge about the DOM. This takes a config object
* with the following properties:
*
   * isCustomAttribute: function that, given an attribute name, will return true
   * if it can be inserted into the DOM verbatim. Useful for data-* or aria-*
   * attributes where it's impossible to enumerate all of the possible
   * attribute names.
*
* Properties: object mapping DOM property name to one of the
* DOMPropertyInjection constants or null. If your attribute isn't in here,
* it won't get written to the DOM.
*
* DOMAttributeNames: object mapping React attribute name to the DOM
* attribute name. Attribute names not specified use the **lowercase**
* normalized name.
*
* DOMPropertyNames: similar to DOMAttributeNames but for DOM properties.
* Property names not specified use the normalized name.
*
* DOMMutationMethods: Properties that require special mutation methods. If
* `value` is undefined, the mutation method should unset the property.
*
* @param {object} domPropertyConfig the config as described above.
*/
injectDOMPropertyConfig: function(domPropertyConfig) {
var Properties = domPropertyConfig.Properties || {};
var DOMAttributeNames = domPropertyConfig.DOMAttributeNames || {};
var DOMPropertyNames = domPropertyConfig.DOMPropertyNames || {};
var DOMMutationMethods = domPropertyConfig.DOMMutationMethods || {};
if (domPropertyConfig.isCustomAttribute) {
DOMProperty._isCustomAttributeFunctions.push(
domPropertyConfig.isCustomAttribute
);
}
for (var propName in Properties) {
("production" !== "production" ? invariant(
!DOMProperty.isStandardName.hasOwnProperty(propName),
'injectDOMPropertyConfig(...): You\'re trying to inject DOM property ' +
'\'%s\' which has already been injected. You may be accidentally ' +
'injecting the same DOM property config twice, or you may be ' +
'injecting two configs that have conflicting property names.',
propName
) : invariant(!DOMProperty.isStandardName.hasOwnProperty(propName)));
DOMProperty.isStandardName[propName] = true;
var lowerCased = propName.toLowerCase();
DOMProperty.getPossibleStandardName[lowerCased] = propName;
if (DOMAttributeNames.hasOwnProperty(propName)) {
var attributeName = DOMAttributeNames[propName];
DOMProperty.getPossibleStandardName[attributeName] = propName;
DOMProperty.getAttributeName[propName] = attributeName;
} else {
DOMProperty.getAttributeName[propName] = lowerCased;
}
DOMProperty.getPropertyName[propName] =
DOMPropertyNames.hasOwnProperty(propName) ?
DOMPropertyNames[propName] :
propName;
if (DOMMutationMethods.hasOwnProperty(propName)) {
DOMProperty.getMutationMethod[propName] = DOMMutationMethods[propName];
} else {
DOMProperty.getMutationMethod[propName] = null;
}
var propConfig = Properties[propName];
DOMProperty.mustUseAttribute[propName] =
checkMask(propConfig, DOMPropertyInjection.MUST_USE_ATTRIBUTE);
DOMProperty.mustUseProperty[propName] =
checkMask(propConfig, DOMPropertyInjection.MUST_USE_PROPERTY);
DOMProperty.hasSideEffects[propName] =
checkMask(propConfig, DOMPropertyInjection.HAS_SIDE_EFFECTS);
DOMProperty.hasBooleanValue[propName] =
checkMask(propConfig, DOMPropertyInjection.HAS_BOOLEAN_VALUE);
DOMProperty.hasNumericValue[propName] =
checkMask(propConfig, DOMPropertyInjection.HAS_NUMERIC_VALUE);
DOMProperty.hasPositiveNumericValue[propName] =
checkMask(propConfig, DOMPropertyInjection.HAS_POSITIVE_NUMERIC_VALUE);
DOMProperty.hasOverloadedBooleanValue[propName] =
checkMask(propConfig, DOMPropertyInjection.HAS_OVERLOADED_BOOLEAN_VALUE);
("production" !== "production" ? invariant(
!DOMProperty.mustUseAttribute[propName] ||
!DOMProperty.mustUseProperty[propName],
'DOMProperty: Cannot require using both attribute and property: %s',
propName
) : invariant(!DOMProperty.mustUseAttribute[propName] ||
!DOMProperty.mustUseProperty[propName]));
("production" !== "production" ? invariant(
DOMProperty.mustUseProperty[propName] ||
!DOMProperty.hasSideEffects[propName],
'DOMProperty: Properties that have side effects must use property: %s',
propName
) : invariant(DOMProperty.mustUseProperty[propName] ||
!DOMProperty.hasSideEffects[propName]));
("production" !== "production" ? invariant(
!!DOMProperty.hasBooleanValue[propName] +
!!DOMProperty.hasNumericValue[propName] +
!!DOMProperty.hasOverloadedBooleanValue[propName] <= 1,
'DOMProperty: Value can be one of boolean, overloaded boolean, or ' +
'numeric value, but not a combination: %s',
propName
) : invariant(!!DOMProperty.hasBooleanValue[propName] +
!!DOMProperty.hasNumericValue[propName] +
!!DOMProperty.hasOverloadedBooleanValue[propName] <= 1));
}
}
};
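// Illustrative injection config (hypothetical; the real HTML config is
// injected elsewhere in this bundle):
//
//   DOMProperty.injection.injectDOMPropertyConfig({
//     isCustomAttribute: function(name) { return /^data-/.test(name); },
//     Properties: {
//       checked: DOMPropertyInjection.MUST_USE_PROPERTY |
//                DOMPropertyInjection.HAS_BOOLEAN_VALUE
//     },
//     DOMAttributeNames: {htmlFor: 'for'}
//   });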
var defaultValueCache = {};
/**
* DOMProperty exports lookup objects that can be used like functions:
*
 *   > DOMProperty.isStandardName['id']
 *   true
 *   > DOMProperty.isStandardName['foobar']
 *   undefined
*
* Although this may be confusing, it performs better in general.
*
* @see http://jsperf.com/key-exists
* @see http://jsperf.com/key-missing
*/
var DOMProperty = {
ID_ATTRIBUTE_NAME: 'data-reactid',
/**
* Checks whether a property name is a standard property.
* @type {Object}
*/
isStandardName: {},
/**
* Mapping from lowercase property names to the properly cased version, used
* to warn in the case of missing properties.
* @type {Object}
*/
getPossibleStandardName: {},
/**
* Mapping from normalized names to attribute names that differ. Attribute
* names are used when rendering markup or with `*Attribute()`.
* @type {Object}
*/
getAttributeName: {},
/**
* Mapping from normalized names to properties on DOM node instances.
* (This includes properties that mutate due to external factors.)
* @type {Object}
*/
getPropertyName: {},
/**
* Mapping from normalized names to mutation methods. This will only exist if
* mutation cannot be set simply by the property or `setAttribute()`.
* @type {Object}
*/
getMutationMethod: {},
  /**
   * Whether the property must be accessed and mutated using `*Attribute()`.
   * (This includes anything that fails `<propName> in <element>`.)
   * @type {Object}
   */
  mustUseAttribute: {},
  /**
   * Whether the property must be accessed and mutated as an object property.
   * @type {Object}
   */
  mustUseProperty: {},
/**
* Whether or not setting a value causes side effects such as triggering
* resources to be loaded or text selection changes. We must ensure that
* the value is only set if it has changed.
* @type {Object}
*/
hasSideEffects: {},
/**
* Whether the property should be removed when set to a falsey value.
* @type {Object}
*/
hasBooleanValue: {},
/**
* Whether the property must be numeric or parse as a
* numeric and should be removed when set to a falsey value.
* @type {Object}
*/
hasNumericValue: {},
/**
* Whether the property must be positive numeric or parse as a positive
* numeric and should be removed when set to a falsey value.
* @type {Object}
*/
hasPositiveNumericValue: {},
/**
* Whether the property can be used as a flag as well as with a value. Removed
* when strictly equal to false; present without a value when strictly equal
* to true; present with a value otherwise.
* @type {Object}
*/
hasOverloadedBooleanValue: {},
/**
* All of the isCustomAttribute() functions that have been injected.
*/
_isCustomAttributeFunctions: [],
/**
* Checks whether a property name is a custom attribute.
* @method
*/
isCustomAttribute: function(attributeName) {
for (var i = 0; i < DOMProperty._isCustomAttributeFunctions.length; i++) {
var isCustomAttributeFn = DOMProperty._isCustomAttributeFunctions[i];
if (isCustomAttributeFn(attributeName)) {
return true;
}
}
return false;
},
/**
* Returns the default property value for a DOM property (i.e., not an
* attribute). Most default values are '' or false, but not all. Worse yet,
* some (in particular, `type`) vary depending on the type of element.
*
* TODO: Is it better to grab all the possible properties when creating an
* element to avoid having to create the same element twice?
*/
getDefaultValueForProperty: function(nodeName, prop) {
var nodeDefaults = defaultValueCache[nodeName];
var testElement;
if (!nodeDefaults) {
defaultValueCache[nodeName] = nodeDefaults = {};
}
if (!(prop in nodeDefaults)) {
testElement = document.createElement(nodeName);
nodeDefaults[prop] = testElement[prop];
}
return nodeDefaults[prop];
},
injection: DOMPropertyInjection
};
module.exports = DOMProperty;
},{"./invariant":155}],16:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule DOMPropertyOperations
* @typechecks static-only
*/
'use strict';
var DOMProperty = require("./DOMProperty");
var quoteAttributeValueForBrowser = require("./quoteAttributeValueForBrowser");
var warning = require("./warning");
function shouldIgnoreValue(name, value) {
return value == null ||
(DOMProperty.hasBooleanValue[name] && !value) ||
(DOMProperty.hasNumericValue[name] && isNaN(value)) ||
(DOMProperty.hasPositiveNumericValue[name] && (value < 1)) ||
(DOMProperty.hasOverloadedBooleanValue[name] && value === false);
}
if ("production" !== "production") {
var reactProps = {
children: true,
dangerouslySetInnerHTML: true,
key: true,
ref: true
};
var warnedProperties = {};
var warnUnknownProperty = function(name) {
if (reactProps.hasOwnProperty(name) && reactProps[name] ||
warnedProperties.hasOwnProperty(name) && warnedProperties[name]) {
return;
}
warnedProperties[name] = true;
var lowerCasedName = name.toLowerCase();
// data-* attributes should be lowercase; suggest the lowercase version
var standardName = (
DOMProperty.isCustomAttribute(lowerCasedName) ?
lowerCasedName :
DOMProperty.getPossibleStandardName.hasOwnProperty(lowerCasedName) ?
DOMProperty.getPossibleStandardName[lowerCasedName] :
null
);
// For now, only warn when we have a suggested correction. This prevents
// logging too much when using transferPropsTo.
("production" !== "production" ? warning(
standardName == null,
'Unknown DOM property %s. Did you mean %s?',
name,
standardName
) : null);
};
}
/**
* Operations for dealing with DOM properties.
*/
var DOMPropertyOperations = {
/**
* Creates markup for the ID property.
*
* @param {string} id Unescaped ID.
* @return {string} Markup string.
*/
createMarkupForID: function(id) {
return DOMProperty.ID_ATTRIBUTE_NAME + '=' +
quoteAttributeValueForBrowser(id);
},
/**
* Creates markup for a property.
*
* @param {string} name
* @param {*} value
* @return {?string} Markup string, or null if the property was invalid.
*/
createMarkupForProperty: function(name, value) {
if (DOMProperty.isStandardName.hasOwnProperty(name) &&
DOMProperty.isStandardName[name]) {
if (shouldIgnoreValue(name, value)) {
return '';
}
var attributeName = DOMProperty.getAttributeName[name];
if (DOMProperty.hasBooleanValue[name] ||
(DOMProperty.hasOverloadedBooleanValue[name] && value === true)) {
return attributeName;
}
return attributeName + '=' + quoteAttributeValueForBrowser(value);
} else if (DOMProperty.isCustomAttribute(name)) {
if (value == null) {
return '';
}
return name + '=' + quoteAttributeValueForBrowser(value);
} else if ("production" !== "production") {
warnUnknownProperty(name);
}
return null;
},
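  // Illustrative results (assuming the standard HTML property config has been
  // injected):
  //
  //   > DOMPropertyOperations.createMarkupForProperty('id', 'main')
  //   'id="main"'
  //   > DOMPropertyOperations.createMarkupForProperty('checked', true)
  //   'checked'  // boolean attributes render without a value
  //   > DOMPropertyOperations.createMarkupForProperty('checked', false)
  //   ''         // ignored values yield an empty string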
/**
* Sets the value for a property on a node.
*
* @param {DOMElement} node
* @param {string} name
* @param {*} value
*/
setValueForProperty: function(node, name, value) {
if (DOMProperty.isStandardName.hasOwnProperty(name) &&
DOMProperty.isStandardName[name]) {
var mutationMethod = DOMProperty.getMutationMethod[name];
if (mutationMethod) {
mutationMethod(node, value);
} else if (shouldIgnoreValue(name, value)) {
this.deleteValueForProperty(node, name);
} else if (DOMProperty.mustUseAttribute[name]) {
      // `setAttribute` with objects becomes only `[object]` in IE8/9;
      // ('' + value) makes it output the correct toString() value.
node.setAttribute(DOMProperty.getAttributeName[name], '' + value);
} else {
var propName = DOMProperty.getPropertyName[name];
      // Must explicitly cast values for HAS_SIDE_EFFECTS properties to the
      // property type before comparing; the only such property is `value`,
      // which is a string.
if (!DOMProperty.hasSideEffects[name] ||
('' + node[propName]) !== ('' + value)) {
// Contrary to `setAttribute`, object properties are properly
// `toString`ed by IE8/9.
node[propName] = value;
}
}
} else if (DOMProperty.isCustomAttribute(name)) {
if (value == null) {
node.removeAttribute(name);
} else {
node.setAttribute(name, '' + value);
}
} else if ("production" !== "production") {
warnUnknownProperty(name);
}
},
/**
* Deletes the value for a property on a node.
*
* @param {DOMElement} node
* @param {string} name
*/
deleteValueForProperty: function(node, name) {
if (DOMProperty.isStandardName.hasOwnProperty(name) &&
DOMProperty.isStandardName[name]) {
var mutationMethod = DOMProperty.getMutationMethod[name];
if (mutationMethod) {
mutationMethod(node, undefined);
} else if (DOMProperty.mustUseAttribute[name]) {
node.removeAttribute(DOMProperty.getAttributeName[name]);
} else {
var propName = DOMProperty.getPropertyName[name];
var defaultValue = DOMProperty.getDefaultValueForProperty(
node.nodeName,
propName
);
if (!DOMProperty.hasSideEffects[name] ||
('' + node[propName]) !== defaultValue) {
node[propName] = defaultValue;
}
}
} else if (DOMProperty.isCustomAttribute(name)) {
node.removeAttribute(name);
} else if ("production" !== "production") {
warnUnknownProperty(name);
}
}
};
module.exports = DOMPropertyOperations;
},{"./DOMProperty":15,"./quoteAttributeValueForBrowser":168,"./warning":176}],17:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule Danger
* @typechecks static-only
*/
/*jslint evil: true, sub: true */
'use strict';
var ExecutionEnvironment = require("./ExecutionEnvironment");
var createNodesFromMarkup = require("./createNodesFromMarkup");
var emptyFunction = require("./emptyFunction");
var getMarkupWrap = require("./getMarkupWrap");
var invariant = require("./invariant");
var OPEN_TAG_NAME_EXP = /^(<[^ \/>]+)/;
var RESULT_INDEX_ATTR = 'data-danger-index';
/**
* Extracts the `nodeName` from a string of markup.
*
* NOTE: Extracting the `nodeName` does not require a regular expression match
* because we make assumptions about React-generated markup (i.e. there are no
* spaces surrounding the opening tag and there is at least one attribute).
*
* @param {string} markup String of markup.
* @return {string} Node name of the supplied markup.
* @see http://jsperf.com/extract-nodename
*/
function getNodeName(markup) {
return markup.substring(1, markup.indexOf(' '));
}
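// e.g. getNodeName('<div data-reactid=".0"></div>') === 'div'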
var Danger = {
/**
* Renders markup into an array of nodes. The markup is expected to render
* into a list of root nodes. Also, the length of `resultList` and
* `markupList` should be the same.
*
* @param {array<string>} markupList List of markup strings to render.
* @return {array<DOMElement>} List of rendered nodes.
* @internal
*/
dangerouslyRenderMarkup: function(markupList) {
("production" !== "production" ? invariant(
ExecutionEnvironment.canUseDOM,
'dangerouslyRenderMarkup(...): Cannot render markup in a worker ' +
'thread. Make sure `window` and `document` are available globally ' +
'before requiring React when unit testing or use ' +
'React.renderToString for server rendering.'
) : invariant(ExecutionEnvironment.canUseDOM));
var nodeName;
var markupByNodeName = {};
// Group markup by `nodeName` if a wrap is necessary, else by '*'.
for (var i = 0; i < markupList.length; i++) {
("production" !== "production" ? invariant(
markupList[i],
'dangerouslyRenderMarkup(...): Missing markup.'
) : invariant(markupList[i]));
nodeName = getNodeName(markupList[i]);
nodeName = getMarkupWrap(nodeName) ? nodeName : '*';
markupByNodeName[nodeName] = markupByNodeName[nodeName] || [];
markupByNodeName[nodeName][i] = markupList[i];
}
var resultList = [];
var resultListAssignmentCount = 0;
for (nodeName in markupByNodeName) {
if (!markupByNodeName.hasOwnProperty(nodeName)) {
continue;
}
var markupListByNodeName = markupByNodeName[nodeName];
// This for-in loop skips the holes of the sparse array. The order of
// iteration should follow the order of assignment, which happens to match
// numerical index order, but we don't rely on that.
var resultIndex;
for (resultIndex in markupListByNodeName) {
if (markupListByNodeName.hasOwnProperty(resultIndex)) {
var markup = markupListByNodeName[resultIndex];
// Push the requested markup with an additional RESULT_INDEX_ATTR
// attribute. If the markup does not start with a < character, it
// will be discarded below (with an appropriate console.error).
markupListByNodeName[resultIndex] = markup.replace(
OPEN_TAG_NAME_EXP,
// This index will be parsed back out below.
'$1 ' + RESULT_INDEX_ATTR + '="' + resultIndex + '" '
);
}
}
// Render each group of markup with similar wrapping `nodeName`.
var renderNodes = createNodesFromMarkup(
markupListByNodeName.join(''),
emptyFunction // Do nothing special with <script> tags.
);
for (var j = 0; j < renderNodes.length; ++j) {
var renderNode = renderNodes[j];
if (renderNode.hasAttribute &&
renderNode.hasAttribute(RESULT_INDEX_ATTR)) {
resultIndex = +renderNode.getAttribute(RESULT_INDEX_ATTR);
renderNode.removeAttribute(RESULT_INDEX_ATTR);
("production" !== "production" ? invariant(
!resultList.hasOwnProperty(resultIndex),
'Danger: Assigning to an already-occupied result index.'
) : invariant(!resultList.hasOwnProperty(resultIndex)));
resultList[resultIndex] = renderNode;
// This should match resultList.length and markupList.length when
// we're done.
resultListAssignmentCount += 1;
} else if ("production" !== "production") {
console.error(
'Danger: Discarding unexpected node:',
renderNode
);
}
}
}
// Although resultList was populated out of order, it should now be a dense
// array.
("production" !== "production" ? invariant(
resultListAssignmentCount === resultList.length,
'Danger: Did not assign to every index of resultList.'
) : invariant(resultListAssignmentCount === resultList.length));
("production" !== "production" ? invariant(
resultList.length === markupList.length,
'Danger: Expected markup to render %s nodes, but rendered %s.',
markupList.length,
resultList.length
) : invariant(resultList.length === markupList.length));
return resultList;
},
/**
* Replaces a node with a string of markup at its current position within its
* parent. The markup must render into a single root node.
*
* @param {DOMElement} oldChild Child node to replace.
* @param {string} markup Markup to render in place of the child node.
* @internal
*/
dangerouslyReplaceNodeWithMarkup: function(oldChild, markup) {
("production" !== "production" ? invariant(
ExecutionEnvironment.canUseDOM,
'dangerouslyReplaceNodeWithMarkup(...): Cannot render markup in a ' +
'worker thread. Make sure `window` and `document` are available ' +
'globally before requiring React when unit testing or use ' +
'React.renderToString for server rendering.'
) : invariant(ExecutionEnvironment.canUseDOM));
("production" !== "production" ? invariant(markup, 'dangerouslyReplaceNodeWithMarkup(...): Missing markup.') : invariant(markup));
("production" !== "production" ? invariant(
oldChild.tagName.toLowerCase() !== 'html',
'dangerouslyReplaceNodeWithMarkup(...): Cannot replace markup of the ' +
'<html> node. This is because browser quirks make this unreliable ' +
'and/or slow. If you want to render to the root you must use ' +
'server rendering. See React.renderToString().'
) : invariant(oldChild.tagName.toLowerCase() !== 'html'));
var newChild = createNodesFromMarkup(markup, emptyFunction)[0];
oldChild.parentNode.replaceChild(newChild, oldChild);
}
};
module.exports = Danger;
},{"./ExecutionEnvironment":26,"./createNodesFromMarkup":131,"./emptyFunction":134,"./getMarkupWrap":147,"./invariant":155}],18:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule DefaultEventPluginOrder
*/
'use strict';
var keyOf = require("./keyOf");
/**
 * Module that is injectable into `EventPluginHub` and specifies a
 * deterministic ordering of `EventPlugin`s. A convenient way to reason about
 * plugins, without having to package every one of them. This is better than
 * having plugins be ordered in the same order that they are injected, because
 * that ordering would be influenced by the packaging order.
* `ResponderEventPlugin` must occur before `SimpleEventPlugin` so that
* preventing default on events is convenient in `SimpleEventPlugin` handlers.
*/
var DefaultEventPluginOrder = [
keyOf({ResponderEventPlugin: null}),
keyOf({SimpleEventPlugin: null}),
keyOf({TapEventPlugin: null}),
keyOf({EnterLeaveEventPlugin: null}),
keyOf({ChangeEventPlugin: null}),
keyOf({SelectEventPlugin: null}),
keyOf({BeforeInputEventPlugin: null}),
keyOf({AnalyticsEventPlugin: null}),
keyOf({MobileSafariClickEventPlugin: null})
];
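// keyOf returns the literal key name (so it survives minification); the array
// above is simply ['ResponderEventPlugin', 'SimpleEventPlugin', ...].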
module.exports = DefaultEventPluginOrder;
},{"./keyOf":162}],19:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule EnterLeaveEventPlugin
* @typechecks static-only
*/
'use strict';
var EventConstants = require("./EventConstants");
var EventPropagators = require("./EventPropagators");
var SyntheticMouseEvent = require("./SyntheticMouseEvent");
var ReactMount = require("./ReactMount");
var keyOf = require("./keyOf");
var topLevelTypes = EventConstants.topLevelTypes;
var getFirstReactDOM = ReactMount.getFirstReactDOM;
var eventTypes = {
mouseEnter: {
registrationName: keyOf({onMouseEnter: null}),
dependencies: [
topLevelTypes.topMouseOut,
topLevelTypes.topMouseOver
]
},
mouseLeave: {
registrationName: keyOf({onMouseLeave: null}),
dependencies: [
topLevelTypes.topMouseOut,
topLevelTypes.topMouseOver
]
}
};
var extractedEvents = [null, null];
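// (Reused on every extraction to avoid allocating a new array per mouse move.)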
var EnterLeaveEventPlugin = {
eventTypes: eventTypes,
/**
   * For almost every interaction we care about, both a top-level `mouseover`
   * and a `mouseout` event occur. We use only `mouseout` so that
* we do not extract duplicate events. However, moving the mouse into the
* browser from outside will not fire a `mouseout` event. In this case, we use
* the `mouseover` top-level event.
*
* @param {string} topLevelType Record from `EventConstants`.
* @param {DOMEventTarget} topLevelTarget The listening component root node.
* @param {string} topLevelTargetID ID of `topLevelTarget`.
* @param {object} nativeEvent Native browser event.
* @return {*} An accumulation of synthetic events.
* @see {EventPluginHub.extractEvents}
*/
extractEvents: function(
topLevelType,
topLevelTarget,
topLevelTargetID,
nativeEvent) {
if (topLevelType === topLevelTypes.topMouseOver &&
(nativeEvent.relatedTarget || nativeEvent.fromElement)) {
return null;
}
if (topLevelType !== topLevelTypes.topMouseOut &&
topLevelType !== topLevelTypes.topMouseOver) {
// Must not be a mouse in or mouse out - ignoring.
return null;
}
var win;
if (topLevelTarget.window === topLevelTarget) {
// `topLevelTarget` is probably a window object.
win = topLevelTarget;
} else {
// TODO: Figure out why `ownerDocument` is sometimes undefined in IE8.
var doc = topLevelTarget.ownerDocument;
if (doc) {
win = doc.defaultView || doc.parentWindow;
} else {
win = window;
}
}
var from, to;
if (topLevelType === topLevelTypes.topMouseOut) {
from = topLevelTarget;
to =
getFirstReactDOM(nativeEvent.relatedTarget || nativeEvent.toElement) ||
win;
} else {
from = win;
to = topLevelTarget;
}
if (from === to) {
// Nothing pertains to our managed components.
return null;
}
var fromID = from ? ReactMount.getID(from) : '';
var toID = to ? ReactMount.getID(to) : '';
var leave = SyntheticMouseEvent.getPooled(
eventTypes.mouseLeave,
fromID,
nativeEvent
);
leave.type = 'mouseleave';
leave.target = from;
leave.relatedTarget = to;
var enter = SyntheticMouseEvent.getPooled(
eventTypes.mouseEnter,
toID,
nativeEvent
);
enter.type = 'mouseenter';
enter.target = to;
enter.relatedTarget = from;
EventPropagators.accumulateEnterLeaveDispatches(leave, enter, fromID, toID);
extractedEvents[0] = leave;
extractedEvents[1] = enter;
return extractedEvents;
}
};
module.exports = EnterLeaveEventPlugin;
},{"./EventConstants":20,"./EventPropagators":25,"./ReactMount":81,"./SyntheticMouseEvent":117,"./keyOf":162}],20:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule EventConstants
*/
'use strict';
var keyMirror = require("./keyMirror");
var PropagationPhases = keyMirror({bubbled: null, captured: null});
/**
* Types of raw signals from the browser caught at the top level.
*/
var topLevelTypes = keyMirror({
topBlur: null,
topChange: null,
topClick: null,
topCompositionEnd: null,
topCompositionStart: null,
topCompositionUpdate: null,
topContextMenu: null,
topCopy: null,
topCut: null,
topDoubleClick: null,
topDrag: null,
topDragEnd: null,
topDragEnter: null,
topDragExit: null,
topDragLeave: null,
topDragOver: null,
topDragStart: null,
topDrop: null,
topError: null,
topFocus: null,
topInput: null,
topKeyDown: null,
topKeyPress: null,
topKeyUp: null,
topLoad: null,
topMouseDown: null,
topMouseMove: null,
topMouseOut: null,
topMouseOver: null,
topMouseUp: null,
topPaste: null,
topReset: null,
topScroll: null,
topSelectionChange: null,
topSubmit: null,
topTextInput: null,
topTouchCancel: null,
topTouchEnd: null,
topTouchMove: null,
topTouchStart: null,
topWheel: null
});
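// keyMirror maps each key to its own name, so e.g.
// topLevelTypes.topClick === 'topClick'.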
var EventConstants = {
topLevelTypes: topLevelTypes,
PropagationPhases: PropagationPhases
};
module.exports = EventConstants;
},{"./keyMirror":161}],21:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @providesModule EventListener
* @typechecks
*/
var emptyFunction = require("./emptyFunction");
/**
 * Upstream version of the event listener. It does not take the specific
 * nature of the platform into account.
*/
var EventListener = {
/**
* Listen to DOM events during the bubble phase.
*
* @param {DOMEventTarget} target DOM element to register listener on.
* @param {string} eventType Event type, e.g. 'click' or 'mouseover'.
* @param {function} callback Callback function.
* @return {object} Object with a `remove` method.
*/
listen: function(target, eventType, callback) {
if (target.addEventListener) {
target.addEventListener(eventType, callback, false);
return {
remove: function() {
target.removeEventListener(eventType, callback, false);
}
};
} else if (target.attachEvent) {
target.attachEvent('on' + eventType, callback);
return {
remove: function() {
target.detachEvent('on' + eventType, callback);
}
};
}
},
/**
* Listen to DOM events during the capture phase.
*
* @param {DOMEventTarget} target DOM element to register listener on.
* @param {string} eventType Event type, e.g. 'click' or 'mouseover'.
* @param {function} callback Callback function.
* @return {object} Object with a `remove` method.
*/
capture: function(target, eventType, callback) {
if (!target.addEventListener) {
if ("production" !== "production") {
console.error(
'Attempted to listen to events during the capture phase on a ' +
'browser that does not support the capture phase. Your application ' +
'will not receive some events.'
);
}
return {
remove: emptyFunction
};
} else {
target.addEventListener(eventType, callback, true);
return {
remove: function() {
target.removeEventListener(eventType, callback, true);
}
};
}
},
registerDefault: function() {}
};
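/*
 * Usage sketch (illustrative; `node` and `onClick` are hypothetical):
 *
 *   var subscription = EventListener.listen(node, 'click', onClick);
 *   // ... later, detach without caring which API attached it:
 *   subscription.remove();
 *
 * The returned `remove` closure hides the addEventListener/attachEvent
 * difference from callers.
 */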
module.exports = EventListener;
},{"./emptyFunction":134}],22:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule EventPluginHub
*/
'use strict';
var EventPluginRegistry = require("./EventPluginRegistry");
var EventPluginUtils = require("./EventPluginUtils");
var accumulateInto = require("./accumulateInto");
var forEachAccumulated = require("./forEachAccumulated");
var invariant = require("./invariant");
/**
* Internal store for event listeners
*/
var listenerBank = {};
/**
* Internal queue of events that have accumulated their dispatches and are
* waiting to have their dispatches executed.
*/
var eventQueue = null;
/**
* Dispatches an event and releases it back into the pool, unless persistent.
*
* @param {?object} event Synthetic event to be dispatched.
* @private
*/
var executeDispatchesAndRelease = function(event) {
if (event) {
var executeDispatch = EventPluginUtils.executeDispatch;
// Plugins can provide custom behavior when dispatching events.
var PluginModule = EventPluginRegistry.getPluginModuleForEvent(event);
if (PluginModule && PluginModule.executeDispatch) {
executeDispatch = PluginModule.executeDispatch;
}
EventPluginUtils.executeDispatchesInOrder(event, executeDispatch);
if (!event.isPersistent()) {
event.constructor.release(event);
}
}
};
/**
 * - `InstanceHandle`: [required] Module that performs logical traversals of the
 *   DOM hierarchy given IDs of the logical DOM elements involved.
*/
var InstanceHandle = null;
function validateInstanceHandle() {
var valid =
InstanceHandle &&
InstanceHandle.traverseTwoPhase &&
InstanceHandle.traverseEnterLeave;
("production" !== "production" ? invariant(
valid,
'InstanceHandle not injected before use!'
) : invariant(valid));
}
/**
* This is a unified interface for event plugins to be installed and configured.
*
* Event plugins can implement the following properties:
*
* `extractEvents` {function(string, DOMEventTarget, string, object): *}
* Required. When a top-level event is fired, this method is expected to
* extract synthetic events that will in turn be queued and dispatched.
*
* `eventTypes` {object}
* Optional, plugins that fire events must publish a mapping of registration
* names that are used to register listeners. Values of this mapping must
* be objects that contain `registrationName` or `phasedRegistrationNames`.
*
* `executeDispatch` {function(object, function, string)}
* Optional, allows plugins to override how an event gets dispatched. By
* default, the listener is simply invoked.
*
 * Each plugin that is injected into `EventPluginHub` is immediately operable.
*
* @public
*/
var EventPluginHub = {
/**
* Methods for injecting dependencies.
*/
injection: {
/**
* @param {object} InjectedMount
* @public
*/
injectMount: EventPluginUtils.injection.injectMount,
/**
* @param {object} InjectedInstanceHandle
* @public
*/
injectInstanceHandle: function(InjectedInstanceHandle) {
InstanceHandle = InjectedInstanceHandle;
if ("production" !== "production") {
validateInstanceHandle();
}
},
getInstanceHandle: function() {
if ("production" !== "production") {
validateInstanceHandle();
}
return InstanceHandle;
},
/**
* @param {array} InjectedEventPluginOrder
* @public
*/
injectEventPluginOrder: EventPluginRegistry.injectEventPluginOrder,
/**
* @param {object} injectedNamesToPlugins Map from names to plugin modules.
*/
injectEventPluginsByName: EventPluginRegistry.injectEventPluginsByName
},
eventNameDispatchConfigs: EventPluginRegistry.eventNameDispatchConfigs,
registrationNameModules: EventPluginRegistry.registrationNameModules,
/**
* Stores `listener` at `listenerBank[registrationName][id]`. Is idempotent.
*
* @param {string} id ID of the DOM element.
* @param {string} registrationName Name of listener (e.g. `onClick`).
* @param {?function} listener The callback to store.
*/
putListener: function(id, registrationName, listener) {
("production" !== "production" ? invariant(
!listener || typeof listener === 'function',
'Expected %s listener to be a function, instead got type %s',
registrationName, typeof listener
) : invariant(!listener || typeof listener === 'function'));
var bankForRegistrationName =
listenerBank[registrationName] || (listenerBank[registrationName] = {});
bankForRegistrationName[id] = listener;
},
/**
* @param {string} id ID of the DOM element.
* @param {string} registrationName Name of listener (e.g. `onClick`).
* @return {?function} The stored callback.
*/
getListener: function(id, registrationName) {
var bankForRegistrationName = listenerBank[registrationName];
return bankForRegistrationName && bankForRegistrationName[id];
},
/**
* Deletes a listener from the registration bank.
*
* @param {string} id ID of the DOM element.
* @param {string} registrationName Name of listener (e.g. `onClick`).
*/
deleteListener: function(id, registrationName) {
var bankForRegistrationName = listenerBank[registrationName];
if (bankForRegistrationName) {
delete bankForRegistrationName[id];
}
},
/**
* Deletes all listeners for the DOM element with the supplied ID.
*
* @param {string} id ID of the DOM element.
*/
deleteAllListeners: function(id) {
for (var registrationName in listenerBank) {
delete listenerBank[registrationName][id];
}
},
/**
* Allows registered plugins an opportunity to extract events from top-level
* native browser events.
*
* @param {string} topLevelType Record from `EventConstants`.
* @param {DOMEventTarget} topLevelTarget The listening component root node.
* @param {string} topLevelTargetID ID of `topLevelTarget`.
* @param {object} nativeEvent Native browser event.
* @return {*} An accumulation of synthetic events.
* @internal
*/
extractEvents: function(
topLevelType,
topLevelTarget,
topLevelTargetID,
nativeEvent) {
var events;
var plugins = EventPluginRegistry.plugins;
for (var i = 0, l = plugins.length; i < l; i++) {
// Not every plugin in the ordering may be loaded at runtime.
var possiblePlugin = plugins[i];
if (possiblePlugin) {
var extractedEvents = possiblePlugin.extractEvents(
topLevelType,
topLevelTarget,
topLevelTargetID,
nativeEvent
);
if (extractedEvents) {
events = accumulateInto(events, extractedEvents);
}
}
}
return events;
},
/**
* Enqueues a synthetic event that should be dispatched when
* `processEventQueue` is invoked.
*
* @param {*} events An accumulation of synthetic events.
* @internal
*/
enqueueEvents: function(events) {
if (events) {
eventQueue = accumulateInto(eventQueue, events);
}
},
/**
* Dispatches all synthetic events on the event queue.
*
* @internal
*/
processEventQueue: function() {
// Set `eventQueue` to null before processing it so that we can tell if more
// events get enqueued while processing.
var processingEventQueue = eventQueue;
eventQueue = null;
forEachAccumulated(processingEventQueue, executeDispatchesAndRelease);
("production" !== "production" ? invariant(
!eventQueue,
'processEventQueue(): Additional events were enqueued while processing ' +
'an event queue. Support for this has not yet been implemented.'
) : invariant(!eventQueue));
},
/**
* These are needed for tests only. Do not use!
*/
__purge: function() {
listenerBank = {};
},
__getListenerBank: function() {
return listenerBank;
}
};
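/*
 * Listener-bank sketch (illustrative; the ID and handler are hypothetical):
 *
 *   EventPluginHub.putListener('.0.1', 'onClick', handleClick);
 *   EventPluginHub.getListener('.0.1', 'onClick');    // => handleClick
 *   EventPluginHub.deleteListener('.0.1', 'onClick'); // removes it again
 *
 * Storage is just `listenerBank[registrationName][id]`, which is why
 * `putListener` is idempotent and `deleteAllListeners` can sweep every
 * registration name for a given ID.
 */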
module.exports = EventPluginHub;
},{"./EventPluginRegistry":23,"./EventPluginUtils":24,"./accumulateInto":123,"./forEachAccumulated":140,"./invariant":155}],23:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule EventPluginRegistry
* @typechecks static-only
*/
'use strict';
var invariant = require("./invariant");
/**
* Injectable ordering of event plugins.
*/
var EventPluginOrder = null;
/**
* Injectable mapping from names to event plugin modules.
*/
var namesToPlugins = {};
/**
* Recomputes the plugin list using the injected plugins and plugin ordering.
*
* @private
*/
function recomputePluginOrdering() {
if (!EventPluginOrder) {
// Wait until an `EventPluginOrder` is injected.
return;
}
for (var pluginName in namesToPlugins) {
var PluginModule = namesToPlugins[pluginName];
var pluginIndex = EventPluginOrder.indexOf(pluginName);
("production" !== "production" ? invariant(
pluginIndex > -1,
'EventPluginRegistry: Cannot inject event plugins that do not exist in ' +
'the plugin ordering, `%s`.',
pluginName
) : invariant(pluginIndex > -1));
if (EventPluginRegistry.plugins[pluginIndex]) {
continue;
}
("production" !== "production" ? invariant(
PluginModule.extractEvents,
'EventPluginRegistry: Event plugins must implement an `extractEvents` ' +
'method, but `%s` does not.',
pluginName
) : invariant(PluginModule.extractEvents));
EventPluginRegistry.plugins[pluginIndex] = PluginModule;
var publishedEvents = PluginModule.eventTypes;
for (var eventName in publishedEvents) {
("production" !== "production" ? invariant(
publishEventForPlugin(
publishedEvents[eventName],
PluginModule,
eventName
),
'EventPluginRegistry: Failed to publish event `%s` for plugin `%s`.',
eventName,
pluginName
) : invariant(publishEventForPlugin(
publishedEvents[eventName],
PluginModule,
eventName
)));
}
}
}
/**
* Publishes an event so that it can be dispatched by the supplied plugin.
*
* @param {object} dispatchConfig Dispatch configuration for the event.
* @param {object} PluginModule Plugin publishing the event.
* @return {boolean} True if the event was successfully published.
* @private
*/
function publishEventForPlugin(dispatchConfig, PluginModule, eventName) {
("production" !== "production" ? invariant(
!EventPluginRegistry.eventNameDispatchConfigs.hasOwnProperty(eventName),
'EventPluginHub: More than one plugin attempted to publish the same ' +
'event name, `%s`.',
eventName
) : invariant(!EventPluginRegistry.eventNameDispatchConfigs.hasOwnProperty(eventName)));
EventPluginRegistry.eventNameDispatchConfigs[eventName] = dispatchConfig;
var phasedRegistrationNames = dispatchConfig.phasedRegistrationNames;
if (phasedRegistrationNames) {
for (var phaseName in phasedRegistrationNames) {
if (phasedRegistrationNames.hasOwnProperty(phaseName)) {
var phasedRegistrationName = phasedRegistrationNames[phaseName];
publishRegistrationName(
phasedRegistrationName,
PluginModule,
eventName
);
}
}
return true;
} else if (dispatchConfig.registrationName) {
publishRegistrationName(
dispatchConfig.registrationName,
PluginModule,
eventName
);
return true;
}
return false;
}
/**
* Publishes a registration name that is used to identify dispatched events and
* can be used with `EventPluginHub.putListener` to register listeners.
*
* @param {string} registrationName Registration name to add.
* @param {object} PluginModule Plugin publishing the event.
* @private
*/
function publishRegistrationName(registrationName, PluginModule, eventName) {
("production" !== "production" ? invariant(
!EventPluginRegistry.registrationNameModules[registrationName],
'EventPluginHub: More than one plugin attempted to publish the same ' +
'registration name, `%s`.',
registrationName
) : invariant(!EventPluginRegistry.registrationNameModules[registrationName]));
EventPluginRegistry.registrationNameModules[registrationName] = PluginModule;
EventPluginRegistry.registrationNameDependencies[registrationName] =
PluginModule.eventTypes[eventName].dependencies;
}
/**
* Registers plugins so that they can extract and dispatch events.
*
* @see {EventPluginHub}
*/
var EventPluginRegistry = {
/**
* Ordered list of injected plugins.
*/
plugins: [],
/**
* Mapping from event name to dispatch config
*/
eventNameDispatchConfigs: {},
/**
* Mapping from registration name to plugin module
*/
registrationNameModules: {},
/**
* Mapping from registration name to event name
*/
registrationNameDependencies: {},
/**
* Injects an ordering of plugins (by plugin name). This allows the ordering
* to be decoupled from injection of the actual plugins so that ordering is
* always deterministic regardless of packaging, on-the-fly injection, etc.
*
* @param {array} InjectedEventPluginOrder
* @internal
* @see {EventPluginHub.injection.injectEventPluginOrder}
*/
injectEventPluginOrder: function(InjectedEventPluginOrder) {
("production" !== "production" ? invariant(
!EventPluginOrder,
'EventPluginRegistry: Cannot inject event plugin ordering more than ' +
'once. You are likely trying to load more than one copy of React.'
) : invariant(!EventPluginOrder));
// Clone the ordering so it cannot be dynamically mutated.
EventPluginOrder = Array.prototype.slice.call(InjectedEventPluginOrder);
recomputePluginOrdering();
},
/**
* Injects plugins to be used by `EventPluginHub`. The plugin names must be
* in the ordering injected by `injectEventPluginOrder`.
*
* Plugins can be injected as part of page initialization or on-the-fly.
*
* @param {object} injectedNamesToPlugins Map from names to plugin modules.
* @internal
* @see {EventPluginHub.injection.injectEventPluginsByName}
*/
injectEventPluginsByName: function(injectedNamesToPlugins) {
var isOrderingDirty = false;
for (var pluginName in injectedNamesToPlugins) {
if (!injectedNamesToPlugins.hasOwnProperty(pluginName)) {
continue;
}
var PluginModule = injectedNamesToPlugins[pluginName];
if (!namesToPlugins.hasOwnProperty(pluginName) ||
namesToPlugins[pluginName] !== PluginModule) {
("production" !== "production" ? invariant(
!namesToPlugins[pluginName],
'EventPluginRegistry: Cannot inject two different event plugins ' +
'using the same name, `%s`.',
pluginName
) : invariant(!namesToPlugins[pluginName]));
namesToPlugins[pluginName] = PluginModule;
isOrderingDirty = true;
}
}
if (isOrderingDirty) {
recomputePluginOrdering();
}
},
/**
* Looks up the plugin for the supplied event.
*
* @param {object} event A synthetic event.
* @return {?object} The plugin that created the supplied event.
* @internal
*/
getPluginModuleForEvent: function(event) {
var dispatchConfig = event.dispatchConfig;
if (dispatchConfig.registrationName) {
return EventPluginRegistry.registrationNameModules[
dispatchConfig.registrationName
] || null;
}
for (var phase in dispatchConfig.phasedRegistrationNames) {
if (!dispatchConfig.phasedRegistrationNames.hasOwnProperty(phase)) {
continue;
}
var PluginModule = EventPluginRegistry.registrationNameModules[
dispatchConfig.phasedRegistrationNames[phase]
];
if (PluginModule) {
return PluginModule;
}
}
return null;
},
/**
* Exposed for unit testing.
* @private
*/
_resetEventPlugins: function() {
EventPluginOrder = null;
for (var pluginName in namesToPlugins) {
if (namesToPlugins.hasOwnProperty(pluginName)) {
delete namesToPlugins[pluginName];
}
}
EventPluginRegistry.plugins.length = 0;
var eventNameDispatchConfigs = EventPluginRegistry.eventNameDispatchConfigs;
for (var eventName in eventNameDispatchConfigs) {
if (eventNameDispatchConfigs.hasOwnProperty(eventName)) {
delete eventNameDispatchConfigs[eventName];
}
}
var registrationNameModules = EventPluginRegistry.registrationNameModules;
for (var registrationName in registrationNameModules) {
if (registrationNameModules.hasOwnProperty(registrationName)) {
delete registrationNameModules[registrationName];
}
}
}
};
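/*
 * Injection sketch (illustrative; `MyPlugin` is a hypothetical module with an
 * `extractEvents` method):
 *
 *   EventPluginRegistry.injectEventPluginOrder(['MyPlugin']);
 *   EventPluginRegistry.injectEventPluginsByName({MyPlugin: MyPlugin});
 *   // EventPluginRegistry.plugins is now [MyPlugin], and each event type the
 *   // plugin publishes has been registered via publishEventForPlugin.
 *
 * The two injections may happen in either order; `recomputePluginOrdering`
 * simply waits until both are present.
 */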
module.exports = EventPluginRegistry;
},{"./invariant":155}],24:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule EventPluginUtils
*/
'use strict';
var EventConstants = require("./EventConstants");
var invariant = require("./invariant");
/**
* Injected dependencies:
*/
/**
* - `Mount`: [required] Module that can convert between React dom IDs and
* actual node references.
*/
var injection = {
Mount: null,
injectMount: function(InjectedMount) {
injection.Mount = InjectedMount;
if ("production" !== "production") {
("production" !== "production" ? invariant(
InjectedMount && InjectedMount.getNode,
'EventPluginUtils.injection.injectMount(...): Injected Mount module ' +
'is missing getNode.'
) : invariant(InjectedMount && InjectedMount.getNode));
}
}
};
var topLevelTypes = EventConstants.topLevelTypes;
function isEndish(topLevelType) {
return topLevelType === topLevelTypes.topMouseUp ||
topLevelType === topLevelTypes.topTouchEnd ||
topLevelType === topLevelTypes.topTouchCancel;
}
function isMoveish(topLevelType) {
return topLevelType === topLevelTypes.topMouseMove ||
topLevelType === topLevelTypes.topTouchMove;
}
function isStartish(topLevelType) {
return topLevelType === topLevelTypes.topMouseDown ||
topLevelType === topLevelTypes.topTouchStart;
}
var validateEventDispatches;
if ("production" !== "production") {
validateEventDispatches = function(event) {
var dispatchListeners = event._dispatchListeners;
var dispatchIDs = event._dispatchIDs;
var listenersIsArr = Array.isArray(dispatchListeners);
var idsIsArr = Array.isArray(dispatchIDs);
var IDsLen = idsIsArr ? dispatchIDs.length : dispatchIDs ? 1 : 0;
var listenersLen = listenersIsArr ?
dispatchListeners.length :
dispatchListeners ? 1 : 0;
("production" !== "production" ? invariant(
idsIsArr === listenersIsArr && IDsLen === listenersLen,
'EventPluginUtils: Invalid `event`.'
) : invariant(idsIsArr === listenersIsArr && IDsLen === listenersLen));
};
}
/**
* Invokes `cb(event, listener, id)`. Avoids using call if no scope is
 * provided. The `(listener, id)` pair effectively forms the "dispatch" but is
 * kept separate to conserve memory.
*/
function forEachEventDispatch(event, cb) {
var dispatchListeners = event._dispatchListeners;
var dispatchIDs = event._dispatchIDs;
if ("production" !== "production") {
validateEventDispatches(event);
}
if (Array.isArray(dispatchListeners)) {
for (var i = 0; i < dispatchListeners.length; i++) {
if (event.isPropagationStopped()) {
break;
}
// Listeners and IDs are two parallel arrays that are always in sync.
cb(event, dispatchListeners[i], dispatchIDs[i]);
}
} else if (dispatchListeners) {
cb(event, dispatchListeners, dispatchIDs);
}
}
/**
* Default implementation of PluginModule.executeDispatch().
 * @param {SyntheticEvent} event SyntheticEvent to handle.
 * @param {function} listener Application-level callback.
 * @param {string} domID DOM ID to pass to the callback.
*/
function executeDispatch(event, listener, domID) {
event.currentTarget = injection.Mount.getNode(domID);
var returnValue = listener(event, domID);
event.currentTarget = null;
return returnValue;
}
/**
* Standard/simple iteration through an event's collected dispatches.
*/
function executeDispatchesInOrder(event, cb) {
forEachEventDispatch(event, cb);
event._dispatchListeners = null;
event._dispatchIDs = null;
}
/**
* Standard/simple iteration through an event's collected dispatches, but stops
* at the first dispatch execution returning true, and returns that id.
*
 * @return id of the first dispatch execution whose listener returns true, or
* null if no listener returned true.
*/
function executeDispatchesInOrderStopAtTrueImpl(event) {
var dispatchListeners = event._dispatchListeners;
var dispatchIDs = event._dispatchIDs;
if ("production" !== "production") {
validateEventDispatches(event);
}
if (Array.isArray(dispatchListeners)) {
for (var i = 0; i < dispatchListeners.length; i++) {
if (event.isPropagationStopped()) {
break;
}
// Listeners and IDs are two parallel arrays that are always in sync.
if (dispatchListeners[i](event, dispatchIDs[i])) {
return dispatchIDs[i];
}
}
} else if (dispatchListeners) {
if (dispatchListeners(event, dispatchIDs)) {
return dispatchIDs;
}
}
return null;
}
/**
* @see executeDispatchesInOrderStopAtTrueImpl
*/
function executeDispatchesInOrderStopAtTrue(event) {
var ret = executeDispatchesInOrderStopAtTrueImpl(event);
event._dispatchIDs = null;
event._dispatchListeners = null;
return ret;
}
/**
* Execution of a "direct" dispatch - there must be at most one dispatch
* accumulated on the event or it is considered an error. It doesn't really make
* sense for an event with multiple dispatches (bubbled) to keep track of the
* return values at each dispatch execution, but it does tend to make sense when
* dealing with "direct" dispatches.
*
* @return The return value of executing the single dispatch.
*/
function executeDirectDispatch(event) {
if ("production" !== "production") {
validateEventDispatches(event);
}
var dispatchListener = event._dispatchListeners;
var dispatchID = event._dispatchIDs;
("production" !== "production" ? invariant(
!Array.isArray(dispatchListener),
'executeDirectDispatch(...): Invalid `event`.'
) : invariant(!Array.isArray(dispatchListener)));
var res = dispatchListener ?
dispatchListener(event, dispatchID) :
null;
event._dispatchListeners = null;
event._dispatchIDs = null;
return res;
}
/**
* @param {SyntheticEvent} event
 * @return {bool} True iff the number of dispatches accumulated is greater than 0.
*/
function hasDispatches(event) {
return !!event._dispatchListeners;
}
/**
* General utilities that are useful in creating custom Event Plugins.
*/
var EventPluginUtils = {
isEndish: isEndish,
isMoveish: isMoveish,
isStartish: isStartish,
executeDirectDispatch: executeDirectDispatch,
executeDispatch: executeDispatch,
executeDispatchesInOrder: executeDispatchesInOrder,
executeDispatchesInOrderStopAtTrue: executeDispatchesInOrderStopAtTrue,
hasDispatches: hasDispatches,
injection: injection,
useTouchEvents: false
};
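/*
 * Classification sketch (illustrative): a gesture arrives as a sequence of
 * top-level types and these predicates partition it, regardless of whether it
 * came from mouse or touch:
 *
 *   isStartish(topLevelTypes.topTouchStart); // => true
 *   isMoveish(topLevelTypes.topMouseMove);   // => true
 *   isEndish(topLevelTypes.topTouchCancel);  // => true (cancel counts as end)
 */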
module.exports = EventPluginUtils;
},{"./EventConstants":20,"./invariant":155}],25:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule EventPropagators
*/
'use strict';
var EventConstants = require("./EventConstants");
var EventPluginHub = require("./EventPluginHub");
var accumulateInto = require("./accumulateInto");
var forEachAccumulated = require("./forEachAccumulated");
var PropagationPhases = EventConstants.PropagationPhases;
var getListener = EventPluginHub.getListener;
/**
* Some event types have a notion of different registration names for different
* "phases" of propagation. This finds listeners by a given phase.
*/
function listenerAtPhase(id, event, propagationPhase) {
var registrationName =
event.dispatchConfig.phasedRegistrationNames[propagationPhase];
return getListener(id, registrationName);
}
/**
 * Tags a `SyntheticEvent` with dispatched listeners. Creating this function
 * here allows us to avoid binding or creating functions for each event.
* Mutating the event's members allows us to not have to create a wrapping
* "dispatch" object that pairs the event with the listener.
*/
function accumulateDirectionalDispatches(domID, upwards, event) {
if ("production" !== "production") {
if (!domID) {
throw new Error('Dispatching id must not be null');
}
}
var phase = upwards ? PropagationPhases.bubbled : PropagationPhases.captured;
var listener = listenerAtPhase(domID, event, phase);
if (listener) {
event._dispatchListeners =
accumulateInto(event._dispatchListeners, listener);
event._dispatchIDs = accumulateInto(event._dispatchIDs, domID);
}
}
/**
* Collect dispatches (must be entirely collected before dispatching - see unit
* tests). Lazily allocate the array to conserve memory. We must loop through
 * each event and perform the traversal for each one. We cannot perform a
* single traversal for the entire collection of events because each event may
* have a different target.
*/
function accumulateTwoPhaseDispatchesSingle(event) {
if (event && event.dispatchConfig.phasedRegistrationNames) {
EventPluginHub.injection.getInstanceHandle().traverseTwoPhase(
event.dispatchMarker,
accumulateDirectionalDispatches,
event
);
}
}
/**
 * Accumulates without regard to direction and does not look for phased
 * registration names. Same as `accumulateDirectDispatchesSingle` but without
* requiring that the `dispatchMarker` be the same as the dispatched ID.
*/
function accumulateDispatches(id, ignoredDirection, event) {
if (event && event.dispatchConfig.registrationName) {
var registrationName = event.dispatchConfig.registrationName;
var listener = getListener(id, registrationName);
if (listener) {
event._dispatchListeners =
accumulateInto(event._dispatchListeners, listener);
event._dispatchIDs = accumulateInto(event._dispatchIDs, id);
}
}
}
/**
 * Accumulates dispatches on a `SyntheticEvent`, but only for the
* `dispatchMarker`.
* @param {SyntheticEvent} event
*/
function accumulateDirectDispatchesSingle(event) {
if (event && event.dispatchConfig.registrationName) {
accumulateDispatches(event.dispatchMarker, null, event);
}
}
function accumulateTwoPhaseDispatches(events) {
forEachAccumulated(events, accumulateTwoPhaseDispatchesSingle);
}
function accumulateEnterLeaveDispatches(leave, enter, fromID, toID) {
EventPluginHub.injection.getInstanceHandle().traverseEnterLeave(
fromID,
toID,
accumulateDispatches,
leave,
enter
);
}
function accumulateDirectDispatches(events) {
forEachAccumulated(events, accumulateDirectDispatchesSingle);
}
/**
* A small set of propagation patterns, each of which will accept a small amount
* of information, and generate a set of "dispatch ready event objects" - which
* are sets of events that have already been annotated with a set of dispatched
* listener functions/ids. The API is designed this way to discourage these
* propagation strategies from actually executing the dispatches, since we
 * always want to collect the entire set of dispatches before executing even a
 * single one.
*
* @constructor EventPropagators
*/
var EventPropagators = {
accumulateTwoPhaseDispatches: accumulateTwoPhaseDispatches,
accumulateDirectDispatches: accumulateDirectDispatches,
accumulateEnterLeaveDispatches: accumulateEnterLeaveDispatches
};
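/*
 * Propagation sketch (illustrative): for a two-phase event whose
 * dispatchMarker is the hypothetical ID '.0.1.2', traverseTwoPhase visits
 * '.0', '.0.1', '.0.1.2' for the captured phase and the same path reversed
 * for the bubbled phase. At each node, accumulateDirectionalDispatches looks
 * up the phased registration name (the capture or bubble variant of the
 * listener) and appends any match onto event._dispatchListeners and
 * event._dispatchIDs for EventPluginUtils.executeDispatchesInOrder to run.
 */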
module.exports = EventPropagators;
},{"./EventConstants":20,"./EventPluginHub":22,"./accumulateInto":123,"./forEachAccumulated":140}],26:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ExecutionEnvironment
*/
/*jslint evil: true */
"use strict";
var canUseDOM = !!(
(typeof window !== 'undefined' &&
window.document && window.document.createElement)
);
/**
 * Simple, lightweight module assisting with the detection and context of a
 * Worker. Helps avoid circular dependencies and allows code to reason about
 * whether or not it is running in a Worker, even if it never includes the
 * main `ReactWorker` dependency.
*/
var ExecutionEnvironment = {
canUseDOM: canUseDOM,
canUseWorkers: typeof Worker !== 'undefined',
canUseEventListeners:
canUseDOM && !!(window.addEventListener || window.attachEvent),
canUseViewport: canUseDOM && !!window.screen,
isInWorker: !canUseDOM // For now, this is true - might change in the future.
};
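/*
 * Guard sketch (illustrative): DOM-touching modules gate their work on these
 * flags so the same bundle can load in Node or a Worker:
 *
 *   if (ExecutionEnvironment.canUseDOM) {
 *     document.createElement('div'); // safe: window.document exists here
 *   }
 */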
module.exports = ExecutionEnvironment;
},{}],27:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule FallbackCompositionState
* @typechecks static-only
*/
'use strict';
var PooledClass = require("./PooledClass");
var assign = require("./Object.assign");
var getTextContentAccessor = require("./getTextContentAccessor");
/**
* This helper class stores information about text content of a target node,
* allowing comparison of content before and after a given event.
*
* Identify the node where selection currently begins, then observe
* both its text content and its current position in the DOM. Since the
* browser may natively replace the target node during composition, we can
* use its position to find its replacement.
*
* @param {DOMEventTarget} root
*/
function FallbackCompositionState(root) {
this._root = root;
this._startText = this.getText();
this._fallbackText = null;
}
assign(FallbackCompositionState.prototype, {
/**
* Get current text of input.
*
* @return {string}
*/
getText: function() {
if ('value' in this._root) {
return this._root.value;
}
return this._root[getTextContentAccessor()];
},
/**
* Determine the differing substring between the initially stored
* text content and the current content.
*
* @return {string}
*/
getData: function() {
if (this._fallbackText) {
return this._fallbackText;
}
var start;
var startValue = this._startText;
var startLength = startValue.length;
var end;
var endValue = this.getText();
var endLength = endValue.length;
for (start = 0; start < startLength; start++) {
if (startValue[start] !== endValue[start]) {
break;
}
}
var minEnd = startLength - start;
for (end = 1; end <= minEnd; end++) {
if (startValue[startLength - end] !== endValue[endLength - end]) {
break;
}
}
var sliceTail = end > 1 ? 1 - end : undefined;
this._fallbackText = endValue.slice(start, sliceTail);
return this._fallbackText;
}
});
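/*
 * Worked example of getData() (illustrative): with _startText 'abcd' and a
 * composition producing 'abXYcd', the forward scan breaks at start = 2 (the
 * first mismatch), the backward scan matches the trailing 'cd' and exits with
 * end = 3, so sliceTail = 1 - 3 = -2 and 'abXYcd'.slice(2, -2) === 'XY',
 * exactly the text inserted by the composition.
 */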
PooledClass.addPoolingTo(FallbackCompositionState);
module.exports = FallbackCompositionState;
},{"./Object.assign":33,"./PooledClass":34,"./getTextContentAccessor":150}],28:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule HTMLDOMPropertyConfig
*/
/*jslint bitwise: true*/
'use strict';
var DOMProperty = require("./DOMProperty");
var ExecutionEnvironment = require("./ExecutionEnvironment");
var MUST_USE_ATTRIBUTE = DOMProperty.injection.MUST_USE_ATTRIBUTE;
var MUST_USE_PROPERTY = DOMProperty.injection.MUST_USE_PROPERTY;
var HAS_BOOLEAN_VALUE = DOMProperty.injection.HAS_BOOLEAN_VALUE;
var HAS_SIDE_EFFECTS = DOMProperty.injection.HAS_SIDE_EFFECTS;
var HAS_NUMERIC_VALUE = DOMProperty.injection.HAS_NUMERIC_VALUE;
var HAS_POSITIVE_NUMERIC_VALUE =
DOMProperty.injection.HAS_POSITIVE_NUMERIC_VALUE;
var HAS_OVERLOADED_BOOLEAN_VALUE =
DOMProperty.injection.HAS_OVERLOADED_BOOLEAN_VALUE;
var hasSVG;
if (ExecutionEnvironment.canUseDOM) {
var implementation = document.implementation;
hasSVG = (
implementation &&
implementation.hasFeature &&
implementation.hasFeature(
'http://www.w3.org/TR/SVG11/feature#BasicStructure',
'1.1'
)
);
}
var HTMLDOMPropertyConfig = {
isCustomAttribute: RegExp.prototype.test.bind(
/^(data|aria)-[a-z_][a-z\d_.\-]*$/
),
Properties: {
/**
* Standard Properties
*/
accept: null,
acceptCharset: null,
accessKey: null,
action: null,
allowFullScreen: MUST_USE_ATTRIBUTE | HAS_BOOLEAN_VALUE,
allowTransparency: MUST_USE_ATTRIBUTE,
alt: null,
async: HAS_BOOLEAN_VALUE,
autoComplete: null,
// autoFocus is polyfilled/normalized by AutoFocusMixin
// autoFocus: HAS_BOOLEAN_VALUE,
autoPlay: HAS_BOOLEAN_VALUE,
cellPadding: null,
cellSpacing: null,
charSet: MUST_USE_ATTRIBUTE,
checked: MUST_USE_PROPERTY | HAS_BOOLEAN_VALUE,
classID: MUST_USE_ATTRIBUTE,
// To set className on SVG elements, it's necessary to use .setAttribute;
// this works on HTML elements too in all browsers except IE8. Conveniently,
// IE8 doesn't support SVG and so we can simply use the attribute in
// browsers that support SVG and the property in browsers that don't,
// regardless of whether the element is HTML or SVG.
className: hasSVG ? MUST_USE_ATTRIBUTE : MUST_USE_PROPERTY,
cols: MUST_USE_ATTRIBUTE | HAS_POSITIVE_NUMERIC_VALUE,
colSpan: null,
content: null,
contentEditable: null,
contextMenu: MUST_USE_ATTRIBUTE,
controls: MUST_USE_PROPERTY | HAS_BOOLEAN_VALUE,
coords: null,
crossOrigin: null,
data: null, // For `<object />` acts as `src`.
dateTime: MUST_USE_ATTRIBUTE,
defer: HAS_BOOLEAN_VALUE,
dir: null,
disabled: MUST_USE_ATTRIBUTE | HAS_BOOLEAN_VALUE,
download: HAS_OVERLOADED_BOOLEAN_VALUE,
draggable: null,
encType: null,
form: MUST_USE_ATTRIBUTE,
formAction: MUST_USE_ATTRIBUTE,
formEncType: MUST_USE_ATTRIBUTE,
formMethod: MUST_USE_ATTRIBUTE,
formNoValidate: HAS_BOOLEAN_VALUE,
formTarget: MUST_USE_ATTRIBUTE,
frameBorder: MUST_USE_ATTRIBUTE,
headers: null,
height: MUST_USE_ATTRIBUTE,
hidden: MUST_USE_ATTRIBUTE | HAS_BOOLEAN_VALUE,
high: null,
href: null,
hrefLang: null,
htmlFor: null,
httpEquiv: null,
icon: null,
id: MUST_USE_PROPERTY,
label: null,
lang: null,
list: MUST_USE_ATTRIBUTE,
loop: MUST_USE_PROPERTY | HAS_BOOLEAN_VALUE,
low: null,
manifest: MUST_USE_ATTRIBUTE,
marginHeight: null,
marginWidth: null,
max: null,
maxLength: MUST_USE_ATTRIBUTE,
media: MUST_USE_ATTRIBUTE,
mediaGroup: null,
method: null,
min: null,
multiple: MUST_USE_PROPERTY | HAS_BOOLEAN_VALUE,
muted: MUST_USE_PROPERTY | HAS_BOOLEAN_VALUE,
name: null,
noValidate: HAS_BOOLEAN_VALUE,
open: HAS_BOOLEAN_VALUE,
optimum: null,
pattern: null,
placeholder: null,
poster: null,
preload: null,
radioGroup: null,
readOnly: MUST_USE_PROPERTY | HAS_BOOLEAN_VALUE,
rel: null,
required: HAS_BOOLEAN_VALUE,
role: MUST_USE_ATTRIBUTE,
rows: MUST_USE_ATTRIBUTE | HAS_POSITIVE_NUMERIC_VALUE,
rowSpan: null,
sandbox: null,
scope: null,
scoped: HAS_BOOLEAN_VALUE,
scrolling: null,
seamless: MUST_USE_ATTRIBUTE | HAS_BOOLEAN_VALUE,
selected: MUST_USE_PROPERTY | HAS_BOOLEAN_VALUE,
shape: null,
size: MUST_USE_ATTRIBUTE | HAS_POSITIVE_NUMERIC_VALUE,
sizes: MUST_USE_ATTRIBUTE,
span: HAS_POSITIVE_NUMERIC_VALUE,
spellCheck: null,
src: null,
srcDoc: MUST_USE_PROPERTY,
srcSet: MUST_USE_ATTRIBUTE,
start: HAS_NUMERIC_VALUE,
step: null,
style: null,
tabIndex: null,
target: null,
title: null,
type: null,
useMap: null,
value: MUST_USE_PROPERTY | HAS_SIDE_EFFECTS,
width: MUST_USE_ATTRIBUTE,
wmode: MUST_USE_ATTRIBUTE,
/**
* Non-standard Properties
*/
// autoCapitalize and autoCorrect are supported in Mobile Safari for
// keyboard hints.
autoCapitalize: null,
autoCorrect: null,
// itemProp, itemScope, itemType are for
// Microdata support. See http://schema.org/docs/gs.html
itemProp: MUST_USE_ATTRIBUTE,
itemScope: MUST_USE_ATTRIBUTE | HAS_BOOLEAN_VALUE,
itemType: MUST_USE_ATTRIBUTE,
// itemID and itemRef are for Microdata support as well but
    // only specified in the WHATWG spec document. See
// https://html.spec.whatwg.org/multipage/microdata.html#microdata-dom-api
itemID: MUST_USE_ATTRIBUTE,
itemRef: MUST_USE_ATTRIBUTE,
// property is supported for OpenGraph in meta tags.
property: null,
// IE-only attribute that controls focus behavior
unselectable: MUST_USE_ATTRIBUTE
},
DOMAttributeNames: {
acceptCharset: 'accept-charset',
className: 'class',
htmlFor: 'for',
httpEquiv: 'http-equiv'
},
DOMPropertyNames: {
autoCapitalize: 'autocapitalize',
autoComplete: 'autocomplete',
autoCorrect: 'autocorrect',
autoFocus: 'autofocus',
autoPlay: 'autoplay',
// `encoding` is equivalent to `enctype`, IE8 lacks an `enctype` setter.
// http://www.w3.org/TR/html5/forms.html#dom-fs-encoding
encType: 'encoding',
hrefLang: 'hreflang',
radioGroup: 'radiogroup',
spellCheck: 'spellcheck',
srcDoc: 'srcdoc',
srcSet: 'srcset'
}
};
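/*
 * Flag sketch (illustrative): the behaviors above are single-bit flags, so one
 * entry can combine several with bitwise OR and DOMProperty can test each bit
 * independently:
 *
 *   checked: MUST_USE_PROPERTY | HAS_BOOLEAN_VALUE
 *   // (flags & MUST_USE_PROPERTY) !== 0 -> write node.checked directly
 *   // (flags & HAS_BOOLEAN_VALUE) !== 0 -> treat the value as a boolean
 */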
module.exports = HTMLDOMPropertyConfig;
},{"./DOMProperty":15,"./ExecutionEnvironment":26}],29:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule LinkedStateMixin
* @typechecks static-only
*/
'use strict';
var ReactLink = require("./ReactLink");
var ReactStateSetters = require("./ReactStateSetters");
/**
* A simple mixin around ReactLink.forState().
*/
var LinkedStateMixin = {
/**
* Create a ReactLink that's linked to part of this component's state. The
* ReactLink will have the current value of this.state[key] and will call
* setState() when a change is requested.
*
* @param {string} key state key to update. Note: you may want to use keyOf()
* if you're using Google Closure Compiler advanced mode.
* @return {ReactLink} ReactLink instance linking to the state.
*/
linkState: function(key) {
return new ReactLink(
this.state[key],
ReactStateSetters.createStateKeySetter(this, key)
);
}
};
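/*
 * Usage sketch (illustrative; assumes a component mixing this in with a
 * `message` key in its state):
 *
 *   <input type="text" valueLink={this.linkState('message')} />
 *
 * The returned ReactLink carries this.state.message as `value` plus a
 * `requestChange` callback that funnels edits back through setState().
 */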
module.exports = LinkedStateMixin;
},{"./ReactLink":79,"./ReactStateSetters":98}],30:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule LinkedValueUtils
* @typechecks static-only
*/
'use strict';
var ReactPropTypes = require("./ReactPropTypes");
var invariant = require("./invariant");
var hasReadOnlyValue = {
'button': true,
'checkbox': true,
'image': true,
'hidden': true,
'radio': true,
'reset': true,
'submit': true
};
function _assertSingleLink(input) {
("production" !== "production" ? invariant(
input.props.checkedLink == null || input.props.valueLink == null,
'Cannot provide a checkedLink and a valueLink. If you want to use ' +
'checkedLink, you probably don\'t want to use valueLink and vice versa.'
) : invariant(input.props.checkedLink == null || input.props.valueLink == null));
}
function _assertValueLink(input) {
_assertSingleLink(input);
("production" !== "production" ? invariant(
input.props.value == null && input.props.onChange == null,
'Cannot provide a valueLink and a value or onChange event. If you want ' +
'to use value or onChange, you probably don\'t want to use valueLink.'
) : invariant(input.props.value == null && input.props.onChange == null));
}
function _assertCheckedLink(input) {
_assertSingleLink(input);
("production" !== "production" ? invariant(
input.props.checked == null && input.props.onChange == null,
'Cannot provide a checkedLink and a checked property or onChange event. ' +
'If you want to use checked or onChange, you probably don\'t want to ' +
'use checkedLink'
) : invariant(input.props.checked == null && input.props.onChange == null));
}
/**
* @param {SyntheticEvent} e change event to handle
*/
function _handleLinkedValueChange(e) {
/*jshint validthis:true */
this.props.valueLink.requestChange(e.target.value);
}
/**
* @param {SyntheticEvent} e change event to handle
*/
function _handleLinkedCheckChange(e) {
/*jshint validthis:true */
this.props.checkedLink.requestChange(e.target.checked);
}
/**
* Provide a linked `value` attribute for controlled forms. You should not use
* this outside of the ReactDOM controlled form components.
*/
var LinkedValueUtils = {
Mixin: {
propTypes: {
value: function(props, propName, componentName) {
if (!props[propName] ||
hasReadOnlyValue[props.type] ||
props.onChange ||
props.readOnly ||
props.disabled) {
return null;
}
return new Error(
'You provided a `value` prop to a form field without an ' +
'`onChange` handler. This will render a read-only field. If ' +
'the field should be mutable use `defaultValue`. Otherwise, ' +
'set either `onChange` or `readOnly`.'
);
},
checked: function(props, propName, componentName) {
if (!props[propName] ||
props.onChange ||
props.readOnly ||
props.disabled) {
return null;
}
return new Error(
'You provided a `checked` prop to a form field without an ' +
'`onChange` handler. This will render a read-only field. If ' +
'the field should be mutable use `defaultChecked`. Otherwise, ' +
'set either `onChange` or `readOnly`.'
);
},
onChange: ReactPropTypes.func
}
},
/**
* @param {ReactComponent} input Form component
* @return {*} current value of the input either from value prop or link.
*/
getValue: function(input) {
if (input.props.valueLink) {
_assertValueLink(input);
return input.props.valueLink.value;
}
return input.props.value;
},
/**
* @param {ReactComponent} input Form component
* @return {*} current checked status of the input either from checked prop
* or link.
*/
getChecked: function(input) {
if (input.props.checkedLink) {
_assertCheckedLink(input);
return input.props.checkedLink.value;
}
return input.props.checked;
},
/**
* @param {ReactComponent} input Form component
* @return {function} change callback either from onChange prop or link.
*/
getOnChange: function(input) {
if (input.props.valueLink) {
_assertValueLink(input);
return _handleLinkedValueChange;
} else if (input.props.checkedLink) {
_assertCheckedLink(input);
return _handleLinkedCheckChange;
}
return input.props.onChange;
}
};
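/*
 * Resolution sketch (illustrative): getValue and getOnChange prefer the link
 * over the plain props, so for an input rendered with valueLink={link}:
 *
 *   LinkedValueUtils.getValue(input);    // => link.value
 *   LinkedValueUtils.getOnChange(input); // => _handleLinkedValueChange, which
 *                                        //    calls link.requestChange(...)
 *
 * The _assert* helpers above turn mixing valueLink with value/onChange (or
 * checkedLink with checked) into a hard error instead of a silent conflict.
 */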
module.exports = LinkedValueUtils;
},{"./ReactPropTypes":90,"./invariant":155}],31:[function(require,module,exports){
/**
* Copyright 2014-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule LocalEventTrapMixin
*/
'use strict';
var ReactBrowserEventEmitter = require("./ReactBrowserEventEmitter");
var accumulateInto = require("./accumulateInto");
var forEachAccumulated = require("./forEachAccumulated");
var invariant = require("./invariant");
function remove(event) {
event.remove();
}
var LocalEventTrapMixin = {
trapBubbledEvent:function(topLevelType, handlerBaseName) {
("production" !== "production" ? invariant(this.isMounted(), 'Must be mounted to trap events') : invariant(this.isMounted()));
// If a component renders to null or if another component fatals and causes
// the state of the tree to be corrupted, `node` here can be null.
var node = this.getDOMNode();
("production" !== "production" ? invariant(
node,
'LocalEventTrapMixin.trapBubbledEvent(...): Requires node to be rendered.'
) : invariant(node));
var listener = ReactBrowserEventEmitter.trapBubbledEvent(
topLevelType,
handlerBaseName,
node
);
this._localEventListeners =
accumulateInto(this._localEventListeners, listener);
},
// trapCapturedEvent would look nearly identical. We don't implement that
// method because it isn't currently needed.
componentWillUnmount:function() {
if (this._localEventListeners) {
forEachAccumulated(this._localEventListeners, remove);
}
}
};
module.exports = LocalEventTrapMixin;
},{"./ReactBrowserEventEmitter":37,"./accumulateInto":123,"./forEachAccumulated":140,"./invariant":155}],32:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule MobileSafariClickEventPlugin
* @typechecks static-only
*/
'use strict';
var EventConstants = require("./EventConstants");
var emptyFunction = require("./emptyFunction");
var topLevelTypes = EventConstants.topLevelTypes;
/**
 * Mobile Safari does not properly bubble click events on non-interactive
* elements, which means delegated click listeners do not fire. The workaround
* for this bug involves attaching an empty click listener on the target node.
*
* This particular plugin works around the bug by attaching an empty click
* listener on `touchstart` (which does fire on every element).
*/
var MobileSafariClickEventPlugin = {
eventTypes: null,
/**
* @param {string} topLevelType Record from `EventConstants`.
* @param {DOMEventTarget} topLevelTarget The listening component root node.
* @param {string} topLevelTargetID ID of `topLevelTarget`.
* @param {object} nativeEvent Native browser event.
* @return {*} An accumulation of synthetic events.
* @see {EventPluginHub.extractEvents}
*/
extractEvents: function(
topLevelType,
topLevelTarget,
topLevelTargetID,
nativeEvent) {
if (topLevelType === topLevelTypes.topTouchStart) {
var target = nativeEvent.target;
if (target && !target.onclick) {
target.onclick = emptyFunction;
}
}
}
};
module.exports = MobileSafariClickEventPlugin;
},{"./EventConstants":20,"./emptyFunction":134}],33:[function(require,module,exports){
/**
* Copyright 2014-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule Object.assign
*/
// https://people.mozilla.org/~jorendorff/es6-draft.html#sec-object.assign
'use strict';
function assign(target, sources) {
if (target == null) {
throw new TypeError('Object.assign target cannot be null or undefined');
}
var to = Object(target);
var hasOwnProperty = Object.prototype.hasOwnProperty;
for (var nextIndex = 1; nextIndex < arguments.length; nextIndex++) {
var nextSource = arguments[nextIndex];
if (nextSource == null) {
continue;
}
var from = Object(nextSource);
// We don't currently support accessors nor proxies. Therefore this
// copy cannot throw. If we ever supported this then we must handle
// exceptions and side-effects. We don't support symbols so they won't
// be transferred.
for (var key in from) {
if (hasOwnProperty.call(from, key)) {
to[key] = from[key];
}
}
}
return to;
}
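/*
 * Behavior sketch (illustrative):
 *
 *   assign({a: 1}, {b: 2}, null, {a: 3}); // => {a: 3, b: 2}
 *
 * Later sources win, null/undefined sources are skipped, and only own
 * enumerable string keys are copied.
 */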
module.exports = assign;
},{}],34:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule PooledClass
*/
'use strict';
var invariant = require("./invariant");
/**
* Static poolers. Several custom versions for each potential number of
* arguments. A completely generic pooler is easy to implement, but would
* require accessing the `arguments` object. In each of these, `this` refers to
* the Class itself, not an instance. If any others are needed, simply add them
* here, or in their own files.
*/
var oneArgumentPooler = function(copyFieldsFrom) {
var Klass = this;
if (Klass.instancePool.length) {
var instance = Klass.instancePool.pop();
Klass.call(instance, copyFieldsFrom);
return instance;
} else {
return new Klass(copyFieldsFrom);
}
};
var twoArgumentPooler = function(a1, a2) {
var Klass = this;
if (Klass.instancePool.length) {
var instance = Klass.instancePool.pop();
Klass.call(instance, a1, a2);
return instance;
} else {
return new Klass(a1, a2);
}
};
var threeArgumentPooler = function(a1, a2, a3) {
var Klass = this;
if (Klass.instancePool.length) {
var instance = Klass.instancePool.pop();
Klass.call(instance, a1, a2, a3);
return instance;
} else {
return new Klass(a1, a2, a3);
}
};
var fiveArgumentPooler = function(a1, a2, a3, a4, a5) {
var Klass = this;
if (Klass.instancePool.length) {
var instance = Klass.instancePool.pop();
Klass.call(instance, a1, a2, a3, a4, a5);
return instance;
} else {
return new Klass(a1, a2, a3, a4, a5);
}
};
var standardReleaser = function(instance) {
var Klass = this;
("production" !== "production" ? invariant(
instance instanceof Klass,
'Trying to release an instance into a pool of a different type.'
) : invariant(instance instanceof Klass));
if (instance.destructor) {
instance.destructor();
}
if (Klass.instancePool.length < Klass.poolSize) {
Klass.instancePool.push(instance);
}
};
var DEFAULT_POOL_SIZE = 10;
var DEFAULT_POOLER = oneArgumentPooler;
/**
* Augments `CopyConstructor` to be a poolable class, augmenting only the class
 * itself (statically), not adding any prototypical fields. Any CopyConstructor
 * you give this may have a `poolSize` property; the releaser will look for an
 * optional prototypical `destructor` on instances.
*
* @param {Function} CopyConstructor Constructor that can be used to reset.
* @param {Function} pooler Customizable pooler.
*/
var addPoolingTo = function(CopyConstructor, pooler) {
var NewKlass = CopyConstructor;
NewKlass.instancePool = [];
NewKlass.getPooled = pooler || DEFAULT_POOLER;
if (!NewKlass.poolSize) {
NewKlass.poolSize = DEFAULT_POOL_SIZE;
}
NewKlass.release = standardReleaser;
return NewKlass;
};
var PooledClass = {
addPoolingTo: addPoolingTo,
oneArgumentPooler: oneArgumentPooler,
twoArgumentPooler: twoArgumentPooler,
threeArgumentPooler: threeArgumentPooler,
fiveArgumentPooler: fiveArgumentPooler
};
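/*
 * Pooling sketch (illustrative; `Box` is a hypothetical one-argument class):
 *
 *   function Box(value) { this.value = value; }
 *   Box.prototype.destructor = function() { this.value = null; };
 *   PooledClass.addPoolingTo(Box); // default pooler is oneArgumentPooler
 *
 *   var b = Box.getPooled(42); // reuses a pooled instance when available
 *   Box.release(b);            // runs destructor, returns b to the pool
 */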
module.exports = PooledClass;
},{"./invariant":155}],35:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule React
*/
/* globals __REACT_DEVTOOLS_GLOBAL_HOOK__*/
'use strict';
var EventPluginUtils = require("./EventPluginUtils");
var ReactChildren = require("./ReactChildren");
var ReactComponent = require("./ReactComponent");
var ReactClass = require("./ReactClass");
var ReactContext = require("./ReactContext");
var ReactCurrentOwner = require("./ReactCurrentOwner");
var ReactElement = require("./ReactElement");
var ReactElementValidator = require("./ReactElementValidator");
var ReactDOM = require("./ReactDOM");
var ReactDOMTextComponent = require("./ReactDOMTextComponent");
var ReactDefaultInjection = require("./ReactDefaultInjection");
var ReactInstanceHandles = require("./ReactInstanceHandles");
var ReactMount = require("./ReactMount");
var ReactPerf = require("./ReactPerf");
var ReactPropTypes = require("./ReactPropTypes");
var ReactReconciler = require("./ReactReconciler");
var ReactServerRendering = require("./ReactServerRendering");
var assign = require("./Object.assign");
var findDOMNode = require("./findDOMNode");
var onlyChild = require("./onlyChild");
ReactDefaultInjection.inject();
var createElement = ReactElement.createElement;
var createFactory = ReactElement.createFactory;
var cloneElement = ReactElement.cloneElement;
if ("production" !== "production") {
createElement = ReactElementValidator.createElement;
createFactory = ReactElementValidator.createFactory;
cloneElement = ReactElementValidator.cloneElement;
}
var render = ReactPerf.measure('React', 'render', ReactMount.render);
var React = {
Children: {
map: ReactChildren.map,
forEach: ReactChildren.forEach,
count: ReactChildren.count,
only: onlyChild
},
Component: ReactComponent,
DOM: ReactDOM,
PropTypes: ReactPropTypes,
initializeTouchEvents: function(shouldUseTouch) {
EventPluginUtils.useTouchEvents = shouldUseTouch;
},
createClass: ReactClass.createClass,
createElement: createElement,
cloneElement: cloneElement,
createFactory: createFactory,
createMixin: function(mixin) {
// Currently a noop. Will be used to validate and trace mixins.
return mixin;
},
constructAndRenderComponent: ReactMount.constructAndRenderComponent,
constructAndRenderComponentByID: ReactMount.constructAndRenderComponentByID,
findDOMNode: findDOMNode,
render: render,
renderToString: ReactServerRendering.renderToString,
renderToStaticMarkup: ReactServerRendering.renderToStaticMarkup,
unmountComponentAtNode: ReactMount.unmountComponentAtNode,
isValidElement: ReactElement.isValidElement,
withContext: ReactContext.withContext,
// Hook for JSX spread, don't use this for anything else.
__spread: assign
};
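/*
 * Minimal usage sketch (illustrative; `container` is a hypothetical DOM node):
 *
 *   React.render(React.createElement('h1', null, 'Hello'), container);
 *
 * Note that `render` above is ReactMount.render wrapped in ReactPerf.measure,
 * so this same call path is what the perf tooling observes.
 */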
// Inject the runtime into a devtools global hook regardless of browser.
// Allows for debugging when the hook is injected on the page.
if (
typeof __REACT_DEVTOOLS_GLOBAL_HOOK__ !== 'undefined' &&
typeof __REACT_DEVTOOLS_GLOBAL_HOOK__.inject === 'function') {
__REACT_DEVTOOLS_GLOBAL_HOOK__.inject({
CurrentOwner: ReactCurrentOwner,
InstanceHandles: ReactInstanceHandles,
Mount: ReactMount,
Reconciler: ReactReconciler,
TextComponent: ReactDOMTextComponent
});
}
if ("production" !== "production") {
var ExecutionEnvironment = require("./ExecutionEnvironment");
if (ExecutionEnvironment.canUseDOM && window.top === window.self) {
// If we're in Chrome, look for the devtools marker and provide a download
// link if not installed.
if (navigator.userAgent.indexOf('Chrome') > -1) {
if (typeof __REACT_DEVTOOLS_GLOBAL_HOOK__ === 'undefined') {
console.debug(
'Download the React DevTools for a better development experience: ' +
'https://fb.me/react-devtools'
);
}
}
var expectedFeatures = [
// shims
Array.isArray,
Array.prototype.every,
Array.prototype.forEach,
Array.prototype.indexOf,
Array.prototype.map,
Date.now,
Function.prototype.bind,
Object.keys,
String.prototype.split,
String.prototype.trim,
// shams
Object.create,
Object.freeze
];
for (var i = 0; i < expectedFeatures.length; i++) {
if (!expectedFeatures[i]) {
console.error(
'One or more ES5 shim/shams expected by React are not available: ' +
'https://fb.me/react-warning-polyfills'
);
break;
}
}
}
}
React.version = '0.13.3';
module.exports = React;
},{"./EventPluginUtils":24,"./ExecutionEnvironment":26,"./Object.assign":33,"./ReactChildren":41,"./ReactClass":42,"./ReactComponent":43,"./ReactContext":48,"./ReactCurrentOwner":49,"./ReactDOM":50,"./ReactDOMTextComponent":61,"./ReactDefaultInjection":64,"./ReactElement":67,"./ReactElementValidator":68,"./ReactInstanceHandles":76,"./ReactMount":81,"./ReactPerf":86,"./ReactPropTypes":90,"./ReactReconciler":93,"./ReactServerRendering":96,"./findDOMNode":137,"./onlyChild":165}],36:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactBrowserComponentMixin
*/
'use strict';
var findDOMNode = require("./findDOMNode");
var ReactBrowserComponentMixin = {
/**
* Returns the DOM node rendered by this component.
*
* @return {DOMElement} The root node of this component.
* @final
* @protected
*/
getDOMNode: function() {
return findDOMNode(this);
}
};
module.exports = ReactBrowserComponentMixin;
},{"./findDOMNode":137}],37:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactBrowserEventEmitter
* @typechecks static-only
*/
'use strict';
var EventConstants = require("./EventConstants");
var EventPluginHub = require("./EventPluginHub");
var EventPluginRegistry = require("./EventPluginRegistry");
var ReactEventEmitterMixin = require("./ReactEventEmitterMixin");
var ViewportMetrics = require("./ViewportMetrics");
var assign = require("./Object.assign");
var isEventSupported = require("./isEventSupported");
/**
* Summary of `ReactBrowserEventEmitter` event handling:
*
* - Top-level delegation is used to trap most native browser events. This
* may only occur in the main thread and is the responsibility of
* ReactEventListener, which is injected and can therefore support pluggable
* event sources. This is the only work that occurs in the main thread.
*
* - We normalize and de-duplicate events to account for browser quirks. This
* may be done in the worker thread.
*
 * - Forward these native events (with the associated top-level type used to
 *   trap them) to `EventPluginHub`, which in turn will ask plugins if they want
* to extract any synthetic events.
*
* - The `EventPluginHub` will then process each event by annotating them with
* "dispatches", a sequence of listeners and IDs that care about that event.
*
* - The `EventPluginHub` then dispatches the events.
*
* Overview of React and the event system:
*
* +------------+ .
* | DOM | .
* +------------+ .
* | .
* v .
* +------------+ .
* | ReactEvent | .
* | Listener | .
* +------------+ . +-----------+
* | . +--------+|SimpleEvent|
* | . | |Plugin |
* +-----|------+ . v +-----------+
* | | | . +--------------+ +------------+
* | +-----------.--->|EventPluginHub| | Event |
* | | . | | +-----------+ | Propagators|
* | ReactEvent | . | | |TapEvent | |------------|
* | Emitter | . | |<---+|Plugin | |other plugin|
* | | . | | +-----------+ | utilities |
* | +-----------.--->| | +------------+
* | | | . +--------------+
* +-----|------+ . ^ +-----------+
* | . | |Enter/Leave|
* + . +-------+|Plugin |
* +-------------+ . +-----------+
* | application | .
* |-------------| .
* | | .
* | | .
* +-------------+ .
* .
* React Core . General Purpose Event Plugin System
*/
var alreadyListeningTo = {};
var isMonitoringScrollValue = false;
var reactTopListenersCounter = 0;
// For events like 'submit' which don't consistently bubble (which we trap at a
// lower node than `document`), binding at `document` would cause duplicate
// events so we don't include them here
var topEventMapping = {
topBlur: 'blur',
topChange: 'change',
topClick: 'click',
topCompositionEnd: 'compositionend',
topCompositionStart: 'compositionstart',
topCompositionUpdate: 'compositionupdate',
topContextMenu: 'contextmenu',
topCopy: 'copy',
topCut: 'cut',
topDoubleClick: 'dblclick',
topDrag: 'drag',
topDragEnd: 'dragend',
topDragEnter: 'dragenter',
topDragExit: 'dragexit',
topDragLeave: 'dragleave',
topDragOver: 'dragover',
topDragStart: 'dragstart',
topDrop: 'drop',
topFocus: 'focus',
topInput: 'input',
topKeyDown: 'keydown',
topKeyPress: 'keypress',
topKeyUp: 'keyup',
topMouseDown: 'mousedown',
topMouseMove: 'mousemove',
topMouseOut: 'mouseout',
topMouseOver: 'mouseover',
topMouseUp: 'mouseup',
topPaste: 'paste',
topScroll: 'scroll',
topSelectionChange: 'selectionchange',
topTextInput: 'textInput',
topTouchCancel: 'touchcancel',
topTouchEnd: 'touchend',
topTouchMove: 'touchmove',
topTouchStart: 'touchstart',
topWheel: 'wheel'
};
/**
* To ensure no conflicts with other potential React instances on the page
*/
var topListenersIDKey = '_reactListenersID' + String(Math.random()).slice(2);
function getListeningForDocument(mountAt) {
// In IE8, `mountAt` is a host object and doesn't have `hasOwnProperty`
// directly.
if (!Object.prototype.hasOwnProperty.call(mountAt, topListenersIDKey)) {
mountAt[topListenersIDKey] = reactTopListenersCounter++;
alreadyListeningTo[mountAt[topListenersIDKey]] = {};
}
return alreadyListeningTo[mountAt[topListenersIDKey]];
}
/**
* `ReactBrowserEventEmitter` is used to attach top-level event listeners. For
* example:
*
* ReactBrowserEventEmitter.putListener('myID', 'onClick', myFunction);
*
* This would allocate a "registration" of `('onClick', myFunction)` on 'myID'.
*
* @internal
*/
var ReactBrowserEventEmitter = assign({}, ReactEventEmitterMixin, {
/**
* Injectable event backend
*/
ReactEventListener: null,
injection: {
/**
* @param {object} ReactEventListener
*/
injectReactEventListener: function(ReactEventListener) {
ReactEventListener.setHandleTopLevel(
ReactBrowserEventEmitter.handleTopLevel
);
ReactBrowserEventEmitter.ReactEventListener = ReactEventListener;
}
},
/**
* Sets whether or not any created callbacks should be enabled.
*
* @param {boolean} enabled True if callbacks should be enabled.
*/
setEnabled: function(enabled) {
if (ReactBrowserEventEmitter.ReactEventListener) {
ReactBrowserEventEmitter.ReactEventListener.setEnabled(enabled);
}
},
/**
* @return {boolean} True if callbacks are enabled.
*/
isEnabled: function() {
return !!(
(ReactBrowserEventEmitter.ReactEventListener && ReactBrowserEventEmitter.ReactEventListener.isEnabled())
);
},
/**
* We listen for bubbled touch events on the document object.
*
* Firefox v8.01 (and possibly others) exhibited strange behavior when
* mounting `onmousemove` events at some node that was not the document
* element. The symptoms were that if your mouse is not moving over something
* contained within that mount point (for example on the background) the
* top-level listeners for `onmousemove` won't be called. However, if you
* register the `mousemove` on the document object, then it will of course
 * catch all `mousemove`s. This, along with iOS quirks, justifies restricting
* top-level listeners to the document object only, at least for these
* movement types of events and possibly all events.
*
* @see http://www.quirksmode.org/blog/archives/2010/09/click_event_del.html
*
* Also, `keyup`/`keypress`/`keydown` do not bubble to the window on IE, but
* they bubble to document.
*
* @param {string} registrationName Name of listener (e.g. `onClick`).
* @param {object} contentDocumentHandle Document which owns the container
*/
listenTo: function(registrationName, contentDocumentHandle) {
var mountAt = contentDocumentHandle;
var isListening = getListeningForDocument(mountAt);
var dependencies = EventPluginRegistry.
registrationNameDependencies[registrationName];
var topLevelTypes = EventConstants.topLevelTypes;
for (var i = 0, l = dependencies.length; i < l; i++) {
var dependency = dependencies[i];
if (!(
(isListening.hasOwnProperty(dependency) && isListening[dependency])
)) {
if (dependency === topLevelTypes.topWheel) {
if (isEventSupported('wheel')) {
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topWheel,
'wheel',
mountAt
);
} else if (isEventSupported('mousewheel')) {
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topWheel,
'mousewheel',
mountAt
);
} else {
// Firefox needs to capture a different mouse scroll event.
// @see http://www.quirksmode.org/dom/events/tests/scroll.html
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topWheel,
'DOMMouseScroll',
mountAt
);
}
} else if (dependency === topLevelTypes.topScroll) {
if (isEventSupported('scroll', true)) {
ReactBrowserEventEmitter.ReactEventListener.trapCapturedEvent(
topLevelTypes.topScroll,
'scroll',
mountAt
);
} else {
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topScroll,
'scroll',
ReactBrowserEventEmitter.ReactEventListener.WINDOW_HANDLE
);
}
} else if (dependency === topLevelTypes.topFocus ||
dependency === topLevelTypes.topBlur) {
if (isEventSupported('focus', true)) {
ReactBrowserEventEmitter.ReactEventListener.trapCapturedEvent(
topLevelTypes.topFocus,
'focus',
mountAt
);
ReactBrowserEventEmitter.ReactEventListener.trapCapturedEvent(
topLevelTypes.topBlur,
'blur',
mountAt
);
} else if (isEventSupported('focusin')) {
// IE has `focusin` and `focusout` events which bubble.
// @see http://www.quirksmode.org/blog/archives/2008/04/delegating_the.html
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topFocus,
'focusin',
mountAt
);
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelTypes.topBlur,
'focusout',
mountAt
);
}
// to make sure blur and focus event listeners are only attached once
isListening[topLevelTypes.topBlur] = true;
isListening[topLevelTypes.topFocus] = true;
} else if (topEventMapping.hasOwnProperty(dependency)) {
ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
dependency,
topEventMapping[dependency],
mountAt
);
}
isListening[dependency] = true;
}
}
},
trapBubbledEvent: function(topLevelType, handlerBaseName, handle) {
return ReactBrowserEventEmitter.ReactEventListener.trapBubbledEvent(
topLevelType,
handlerBaseName,
handle
);
},
trapCapturedEvent: function(topLevelType, handlerBaseName, handle) {
return ReactBrowserEventEmitter.ReactEventListener.trapCapturedEvent(
topLevelType,
handlerBaseName,
handle
);
},
/**
* Listens to window scroll and resize events. We cache scroll values so that
* application code can access them without triggering reflows.
*
* NOTE: Scroll events do not bubble.
*
* @see http://www.quirksmode.org/dom/events/scroll.html
*/
ensureScrollValueMonitoring: function() {
if (!isMonitoringScrollValue) {
var refresh = ViewportMetrics.refreshScrollValues;
ReactBrowserEventEmitter.ReactEventListener.monitorScrollValue(refresh);
isMonitoringScrollValue = true;
}
},
eventNameDispatchConfigs: EventPluginHub.eventNameDispatchConfigs,
registrationNameModules: EventPluginHub.registrationNameModules,
putListener: EventPluginHub.putListener,
getListener: EventPluginHub.getListener,
deleteListener: EventPluginHub.deleteListener,
deleteAllListeners: EventPluginHub.deleteAllListeners
});
module.exports = ReactBrowserEventEmitter;
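/**
 * Illustrative sketch (not part of the React source): when a component with
 * an `onClick` prop mounts, React ends up calling something like
 *
 *   ReactBrowserEventEmitter.listenTo('onClick', document);
 *
 * which looks up the registration name's top-level dependencies (here
 * `topClick` -> the native 'click' event) and attaches one delegated listener
 * on the document; `getListeningForDocument` ensures it is attached only once
 * no matter how many `onClick` handlers exist.
 */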
},{"./EventConstants":20,"./EventPluginHub":22,"./EventPluginRegistry":23,"./Object.assign":33,"./ReactEventEmitterMixin":71,"./ViewportMetrics":122,"./isEventSupported":156}],38:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @typechecks
* @providesModule ReactCSSTransitionGroup
*/
'use strict';
var React = require("./React");
var assign = require("./Object.assign");
var ReactTransitionGroup = React.createFactory(
require("./ReactTransitionGroup")
);
var ReactCSSTransitionGroupChild = React.createFactory(
require("./ReactCSSTransitionGroupChild")
);
var ReactCSSTransitionGroup = React.createClass({
displayName: 'ReactCSSTransitionGroup',
propTypes: {
transitionName: React.PropTypes.string.isRequired,
transitionAppear: React.PropTypes.bool,
transitionEnter: React.PropTypes.bool,
transitionLeave: React.PropTypes.bool
},
getDefaultProps: function() {
return {
transitionAppear: false,
transitionEnter: true,
transitionLeave: true
};
},
_wrapChild: function(child) {
// We need to provide this childFactory so that
// ReactCSSTransitionGroupChild can receive updates to name, enter, and
// leave while it is leaving.
return ReactCSSTransitionGroupChild(
{
name: this.props.transitionName,
appear: this.props.transitionAppear,
enter: this.props.transitionEnter,
leave: this.props.transitionLeave
},
child
);
},
render: function() {
return (
ReactTransitionGroup(
assign({}, this.props, {childFactory: this._wrapChild})
)
);
}
});
module.exports = ReactCSSTransitionGroup;
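/**
 * Illustrative usage (not part of the React source), written with plain
 * React.createElement since this bundle contains no JSX; `items` is a
 * hypothetical array of keyed child elements:
 *
 *   React.createElement(
 *     ReactCSSTransitionGroup,
 *     {transitionName: 'example', transitionAppear: true},
 *     items // each child needs a unique `key` so enter/leave can be tracked
 *   );
 *
 * With transitionName 'example', an entering child receives the class
 * 'example-enter' and, one tick later, 'example-enter-active'.
 */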
},{"./Object.assign":33,"./React":35,"./ReactCSSTransitionGroupChild":39,"./ReactTransitionGroup":102}],39:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @typechecks
* @providesModule ReactCSSTransitionGroupChild
*/
'use strict';
var React = require("./React");
var CSSCore = require("./CSSCore");
var ReactTransitionEvents = require("./ReactTransitionEvents");
var onlyChild = require("./onlyChild");
var warning = require("./warning");
// We don't remove the element from the DOM until we receive an animationend or
// transitionend event. If the user screws up and forgets to add an animation,
// their node will be stuck in the DOM forever, so we detect if an animation
// does not start and if it doesn't, we just call the end listener immediately.
var TICK = 17;
var NO_EVENT_TIMEOUT = 5000;
var noEventListener = null;
if ("production" !== "production") {
noEventListener = function() {
("production" !== "production" ? warning(
false,
'transition(): tried to perform an animation without ' +
'an animationend or transitionend event after timeout (' +
'%sms). You should either disable this ' +
'transition in JS or add a CSS animation/transition.',
NO_EVENT_TIMEOUT
) : null);
};
}
var ReactCSSTransitionGroupChild = React.createClass({
displayName: 'ReactCSSTransitionGroupChild',
transition: function(animationType, finishCallback) {
var node = this.getDOMNode();
var className = this.props.name + '-' + animationType;
var activeClassName = className + '-active';
var noEventTimeout = null;
var endListener = function(e) {
if (e && e.target !== node) {
return;
}
if ("production" !== "production") {
clearTimeout(noEventTimeout);
}
CSSCore.removeClass(node, className);
CSSCore.removeClass(node, activeClassName);
ReactTransitionEvents.removeEndEventListener(node, endListener);
// Usually this optional callback is used for informing an owner of
// a leave animation and telling it to remove the child.
if (finishCallback) {
finishCallback();
}
};
ReactTransitionEvents.addEndEventListener(node, endListener);
CSSCore.addClass(node, className);
// Need to do this to actually trigger a transition.
this.queueClass(activeClassName);
if ("production" !== "production") {
noEventTimeout = setTimeout(noEventListener, NO_EVENT_TIMEOUT);
}
},
queueClass: function(className) {
this.classNameQueue.push(className);
if (!this.timeout) {
this.timeout = setTimeout(this.flushClassNameQueue, TICK);
}
},
flushClassNameQueue: function() {
if (this.isMounted()) {
this.classNameQueue.forEach(
CSSCore.addClass.bind(CSSCore, this.getDOMNode())
);
}
this.classNameQueue.length = 0;
this.timeout = null;
},
componentWillMount: function() {
this.classNameQueue = [];
},
componentWillUnmount: function() {
if (this.timeout) {
clearTimeout(this.timeout);
}
},
componentWillAppear: function(done) {
if (this.props.appear) {
this.transition('appear', done);
} else {
done();
}
},
componentWillEnter: function(done) {
if (this.props.enter) {
this.transition('enter', done);
} else {
done();
}
},
componentWillLeave: function(done) {
if (this.props.leave) {
this.transition('leave', done);
} else {
done();
}
},
render: function() {
return onlyChild(this.props.children);
}
});
module.exports = ReactCSSTransitionGroupChild;
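/**
 * Illustrative CSS (not part of the React source) matching the class names
 * computed in `transition()` above for transitionName 'example':
 *
 *   .example-enter        { opacity: 0.01; }
 *   .example-enter-active { opacity: 1; transition: opacity 0.3s ease-in; }
 *   .example-leave        { opacity: 1; }
 *   .example-leave-active { opacity: 0.01; transition: opacity 0.3s ease-in; }
 *
 * Without a CSS transition or animation no transitionend event ever fires,
 * which is exactly the case the NO_EVENT_TIMEOUT warning above (development
 * builds only) is meant to catch.
 */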
},{"./CSSCore":8,"./React":35,"./ReactTransitionEvents":101,"./onlyChild":165,"./warning":176}],40:[function(require,module,exports){
/**
* Copyright 2014-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactChildReconciler
* @typechecks static-only
*/
'use strict';
var ReactReconciler = require("./ReactReconciler");
var flattenChildren = require("./flattenChildren");
var instantiateReactComponent = require("./instantiateReactComponent");
var shouldUpdateReactComponent = require("./shouldUpdateReactComponent");
/**
* ReactChildReconciler provides helpers for initializing or updating a set of
 * children. Its output is suitable for passing on to ReactMultiChild, which
 * does diffed reordering and insertion.
*/
var ReactChildReconciler = {
/**
* Generates a "mount image" for each of the supplied children. In the case
* of `ReactDOMComponent`, a mount image is a string of markup.
*
* @param {?object} nestedChildNodes Nested child maps.
* @return {?object} A set of child instances.
* @internal
*/
instantiateChildren: function(nestedChildNodes, transaction, context) {
var children = flattenChildren(nestedChildNodes);
for (var name in children) {
if (children.hasOwnProperty(name)) {
var child = children[name];
// The rendered children must be turned into instances as they're
// mounted.
var childInstance = instantiateReactComponent(child, null);
children[name] = childInstance;
}
}
return children;
},
/**
* Updates the rendered children and returns a new set of children.
*
* @param {?object} prevChildren Previously initialized set of children.
* @param {?object} nextNestedChildNodes Nested child maps.
* @param {ReactReconcileTransaction} transaction
* @param {object} context
* @return {?object} A new set of child instances.
* @internal
*/
updateChildren: function(
prevChildren,
nextNestedChildNodes,
transaction,
context) {
// We currently don't have a way to track moves here but if we use iterators
// instead of for..in we can zip the iterators and check if an item has
// moved.
// TODO: If nothing has changed, return the prevChildren object so that we
// can quickly bailout if nothing has changed.
var nextChildren = flattenChildren(nextNestedChildNodes);
if (!nextChildren && !prevChildren) {
return null;
}
var name;
for (name in nextChildren) {
if (!nextChildren.hasOwnProperty(name)) {
continue;
}
var prevChild = prevChildren && prevChildren[name];
var prevElement = prevChild && prevChild._currentElement;
var nextElement = nextChildren[name];
if (shouldUpdateReactComponent(prevElement, nextElement)) {
ReactReconciler.receiveComponent(
prevChild, nextElement, transaction, context
);
nextChildren[name] = prevChild;
} else {
if (prevChild) {
ReactReconciler.unmountComponent(prevChild, name);
}
// The child must be instantiated before it's mounted.
var nextChildInstance = instantiateReactComponent(
nextElement,
null
);
nextChildren[name] = nextChildInstance;
}
}
// Unmount children that are no longer present.
for (name in prevChildren) {
if (prevChildren.hasOwnProperty(name) &&
!(nextChildren && nextChildren.hasOwnProperty(name))) {
ReactReconciler.unmountComponent(prevChildren[name]);
}
}
return nextChildren;
},
/**
* Unmounts all rendered children. This should be used to clean up children
* when this component is unmounted.
*
* @param {?object} renderedChildren Previously initialized set of children.
* @internal
*/
unmountChildren: function(renderedChildren) {
for (var name in renderedChildren) {
var renderedChild = renderedChildren[name];
ReactReconciler.unmountComponent(renderedChild);
}
}
};
module.exports = ReactChildReconciler;
},{"./ReactReconciler":93,"./flattenChildren":138,"./instantiateReactComponent":154,"./shouldUpdateReactComponent":172}],41:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactChildren
*/
'use strict';
var PooledClass = require("./PooledClass");
var ReactFragment = require("./ReactFragment");
var traverseAllChildren = require("./traverseAllChildren");
var warning = require("./warning");
var twoArgumentPooler = PooledClass.twoArgumentPooler;
var threeArgumentPooler = PooledClass.threeArgumentPooler;
/**
* PooledClass representing the bookkeeping associated with performing a child
* traversal. Allows avoiding binding callbacks.
*
* @constructor ForEachBookKeeping
* @param {!function} forEachFunction Function to perform traversal with.
 * @param {?*} forEachContext Context to perform the traversal with.
*/
function ForEachBookKeeping(forEachFunction, forEachContext) {
this.forEachFunction = forEachFunction;
this.forEachContext = forEachContext;
}
PooledClass.addPoolingTo(ForEachBookKeeping, twoArgumentPooler);
function forEachSingleChild(traverseContext, child, name, i) {
var forEachBookKeeping = traverseContext;
forEachBookKeeping.forEachFunction.call(
forEachBookKeeping.forEachContext, child, i);
}
/**
* Iterates through children that are typically specified as `props.children`.
*
* The provided forEachFunc(child, index) will be called for each
* leaf child.
*
* @param {?*} children Children tree container.
 * @param {function(*, int)} forEachFunc Function to call for each leaf child.
 * @param {*} forEachContext Context for forEachFunc.
*/
function forEachChildren(children, forEachFunc, forEachContext) {
if (children == null) {
return children;
}
var traverseContext =
ForEachBookKeeping.getPooled(forEachFunc, forEachContext);
traverseAllChildren(children, forEachSingleChild, traverseContext);
ForEachBookKeeping.release(traverseContext);
}
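/**
 * Illustrative usage (not part of the React source); this is exposed
 * publicly as React.Children.forEach:
 *
 *   React.Children.forEach(this.props.children, function(child, i) {
 *     console.log('child #' + i, child);
 *   });
 */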
/**
* PooledClass representing the bookkeeping associated with performing a child
* mapping. Allows avoiding binding callbacks.
*
* @constructor MapBookKeeping
* @param {!*} mapResult Object containing the ordered map of results.
* @param {!function} mapFunction Function to perform mapping with.
* @param {?*} mapContext Context to perform mapping with.
*/
function MapBookKeeping(mapResult, mapFunction, mapContext) {
this.mapResult = mapResult;
this.mapFunction = mapFunction;
this.mapContext = mapContext;
}
PooledClass.addPoolingTo(MapBookKeeping, threeArgumentPooler);
function mapSingleChildIntoContext(traverseContext, child, name, i) {
var mapBookKeeping = traverseContext;
var mapResult = mapBookKeeping.mapResult;
var keyUnique = !mapResult.hasOwnProperty(name);
if ("production" !== "production") {
("production" !== "production" ? warning(
keyUnique,
'ReactChildren.map(...): Encountered two children with the same key, ' +
'`%s`. Child keys must be unique; when two children share a key, only ' +
'the first child will be used.',
name
) : null);
}
if (keyUnique) {
var mappedChild =
mapBookKeeping.mapFunction.call(mapBookKeeping.mapContext, child, i);
mapResult[name] = mappedChild;
}
}
/**
* Maps children that are typically specified as `props.children`.
*
 * The provided mapFunction(child, index) will be called for each
 * leaf child.
*
* TODO: This may likely break any calls to `ReactChildren.map` that were
* previously relying on the fact that we guarded against null children.
*
* @param {?*} children Children tree container.
 * @param {function(*, int)} mapFunction Function to call for each leaf child.
 * @param {*} mapContext Context for mapFunction.
* @return {object} Object containing the ordered map of results.
*/
function mapChildren(children, func, context) {
if (children == null) {
return children;
}
var mapResult = {};
var traverseContext = MapBookKeeping.getPooled(mapResult, func, context);
traverseAllChildren(children, mapSingleChildIntoContext, traverseContext);
MapBookKeeping.release(traverseContext);
return ReactFragment.create(mapResult);
}
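/**
 * Illustrative usage (not part of the React source); this is exposed
 * publicly as React.Children.map, and the returned fragment preserves each
 * child's key:
 *
 *   var wrapped = React.Children.map(this.props.children, function(child) {
 *     return React.createElement('li', null, child);
 *   });
 */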
function forEachSingleChildDummy(traverseContext, child, name, i) {
return null;
}
/**
* Count the number of children that are typically specified as
* `props.children`.
*
* @param {?*} children Children tree container.
* @return {number} The number of children.
*/
function countChildren(children, context) {
return traverseAllChildren(children, forEachSingleChildDummy, null);
}
var ReactChildren = {
forEach: forEachChildren,
map: mapChildren,
count: countChildren
};
module.exports = ReactChildren;
},{"./PooledClass":34,"./ReactFragment":73,"./traverseAllChildren":174,"./warning":176}],42:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactClass
*/
'use strict';
var ReactComponent = require("./ReactComponent");
var ReactCurrentOwner = require("./ReactCurrentOwner");
var ReactElement = require("./ReactElement");
var ReactErrorUtils = require("./ReactErrorUtils");
var ReactInstanceMap = require("./ReactInstanceMap");
var ReactLifeCycle = require("./ReactLifeCycle");
var ReactPropTypeLocations = require("./ReactPropTypeLocations");
var ReactPropTypeLocationNames = require("./ReactPropTypeLocationNames");
var ReactUpdateQueue = require("./ReactUpdateQueue");
var assign = require("./Object.assign");
var invariant = require("./invariant");
var keyMirror = require("./keyMirror");
var keyOf = require("./keyOf");
var warning = require("./warning");
var MIXINS_KEY = keyOf({mixins: null});
/**
* Policies that describe methods in `ReactClassInterface`.
*/
var SpecPolicy = keyMirror({
/**
* These methods may be defined only once by the class specification or mixin.
*/
DEFINE_ONCE: null,
/**
* These methods may be defined by both the class specification and mixins.
* Subsequent definitions will be chained. These methods must return void.
*/
DEFINE_MANY: null,
/**
* These methods are overriding the base class.
*/
OVERRIDE_BASE: null,
/**
* These methods are similar to DEFINE_MANY, except we assume they return
 * objects. We try to merge the keys of the return values of all the mixed-in
 * functions. If there is a key conflict, we throw.
*/
DEFINE_MANY_MERGED: null
});
var injectedMixins = [];
/**
* Composite components are higher-level components that compose other composite
* or native components.
*
* To create a new type of `ReactClass`, pass a specification of
* your new class to `React.createClass`. The only requirement of your class
* specification is that you implement a `render` method.
*
* var MyComponent = React.createClass({
* render: function() {
* return <div>Hello World</div>;
* }
* });
*
* The class specification supports a specific protocol of methods that have
* special meaning (e.g. `render`). See `ReactClassInterface` for
 * the full protocol. Any other properties and methods in the
 * class specification will be available on the prototype.
*
* @interface ReactClassInterface
* @internal
*/
var ReactClassInterface = {
/**
* An array of Mixin objects to include when defining your component.
*
* @type {array}
* @optional
*/
mixins: SpecPolicy.DEFINE_MANY,
/**
* An object containing properties and methods that should be defined on
* the component's constructor instead of its prototype (static methods).
*
* @type {object}
* @optional
*/
statics: SpecPolicy.DEFINE_MANY,
/**
* Definition of prop types for this component.
*
* @type {object}
* @optional
*/
propTypes: SpecPolicy.DEFINE_MANY,
/**
* Definition of context types for this component.
*
* @type {object}
* @optional
*/
contextTypes: SpecPolicy.DEFINE_MANY,
/**
* Definition of context types this component sets for its children.
*
* @type {object}
* @optional
*/
childContextTypes: SpecPolicy.DEFINE_MANY,
// ==== Definition methods ====
/**
   * Invoked once when the class is created. Values in the mapping will be set
   * on `this.props` for any prop not specified by the parent component (i.e.
   * using an `in` check).
*
* This method is invoked before `getInitialState` and therefore cannot rely
* on `this.state` or use `this.setState`.
*
* @return {object}
* @optional
*/
getDefaultProps: SpecPolicy.DEFINE_MANY_MERGED,
/**
* Invoked once before the component is mounted. The return value will be used
* as the initial value of `this.state`.
*
* getInitialState: function() {
* return {
* isOn: false,
* fooBaz: new BazFoo()
* }
* }
*
* @return {object}
* @optional
*/
getInitialState: SpecPolicy.DEFINE_MANY_MERGED,
/**
* @return {object}
* @optional
*/
getChildContext: SpecPolicy.DEFINE_MANY_MERGED,
/**
* Uses props from `this.props` and state from `this.state` to render the
* structure of the component.
*
* No guarantees are made about when or how often this method is invoked, so
* it must not have side effects.
*
* render: function() {
* var name = this.props.name;
* return <div>Hello, {name}!</div>;
* }
*
* @return {ReactComponent}
* @nosideeffects
* @required
*/
render: SpecPolicy.DEFINE_ONCE,
// ==== Delegate methods ====
/**
* Invoked when the component is initially created and about to be mounted.
* This may have side effects, but any external subscriptions or data created
* by this method must be cleaned up in `componentWillUnmount`.
*
* @optional
*/
componentWillMount: SpecPolicy.DEFINE_MANY,
/**
* Invoked when the component has been mounted and has a DOM representation.
* However, there is no guarantee that the DOM node is in the document.
*
* Use this as an opportunity to operate on the DOM when the component has
* been mounted (initialized and rendered) for the first time.
*
* @param {DOMElement} rootNode DOM element representing the component.
* @optional
*/
componentDidMount: SpecPolicy.DEFINE_MANY,
/**
* Invoked before the component receives new props.
*
* Use this as an opportunity to react to a prop transition by updating the
* state using `this.setState`. Current props are accessed via `this.props`.
*
* componentWillReceiveProps: function(nextProps, nextContext) {
* this.setState({
* likesIncreasing: nextProps.likeCount > this.props.likeCount
* });
* }
*
* NOTE: There is no equivalent `componentWillReceiveState`. An incoming prop
* transition may cause a state change, but the opposite is not true. If you
* need it, you are probably looking for `componentWillUpdate`.
*
* @param {object} nextProps
* @optional
*/
componentWillReceiveProps: SpecPolicy.DEFINE_MANY,
/**
* Invoked while deciding if the component should be updated as a result of
* receiving new props, state and/or context.
*
* Use this as an opportunity to `return false` when you're certain that the
* transition to the new props/state/context will not require a component
* update.
*
* shouldComponentUpdate: function(nextProps, nextState, nextContext) {
* return !equal(nextProps, this.props) ||
* !equal(nextState, this.state) ||
* !equal(nextContext, this.context);
* }
*
* @param {object} nextProps
* @param {?object} nextState
* @param {?object} nextContext
* @return {boolean} True if the component should update.
* @optional
*/
shouldComponentUpdate: SpecPolicy.DEFINE_ONCE,
/**
* Invoked when the component is about to update due to a transition from
* `this.props`, `this.state` and `this.context` to `nextProps`, `nextState`
* and `nextContext`.
*
* Use this as an opportunity to perform preparation before an update occurs.
*
* NOTE: You **cannot** use `this.setState()` in this method.
*
* @param {object} nextProps
* @param {?object} nextState
* @param {?object} nextContext
* @param {ReactReconcileTransaction} transaction
* @optional
*/
componentWillUpdate: SpecPolicy.DEFINE_MANY,
/**
* Invoked when the component's DOM representation has been updated.
*
* Use this as an opportunity to operate on the DOM when the component has
* been updated.
*
* @param {object} prevProps
* @param {?object} prevState
* @param {?object} prevContext
* @param {DOMElement} rootNode DOM element representing the component.
* @optional
*/
componentDidUpdate: SpecPolicy.DEFINE_MANY,
/**
* Invoked when the component is about to be removed from its parent and have
* its DOM representation destroyed.
*
* Use this as an opportunity to deallocate any external resources.
*
* NOTE: There is no `componentDidUnmount` since your component will have been
* destroyed by that point.
*
* @optional
*/
componentWillUnmount: SpecPolicy.DEFINE_MANY,
// ==== Advanced methods ====
/**
* Updates the component's currently mounted DOM representation.
*
* By default, this implements React's rendering and reconciliation algorithm.
* Sophisticated clients may wish to override this.
*
* @param {ReactReconcileTransaction} transaction
* @internal
* @overridable
*/
updateComponent: SpecPolicy.OVERRIDE_BASE
};
/**
* Mapping from class specification keys to special processing functions.
*
* Although these are declared like instance properties in the specification
* when defining classes using `React.createClass`, they are actually static
* and are accessible on the constructor instead of the prototype. Despite
* being static, they must be defined outside of the "statics" key under
* which all other static methods are defined.
*/
var RESERVED_SPEC_KEYS = {
displayName: function(Constructor, displayName) {
Constructor.displayName = displayName;
},
mixins: function(Constructor, mixins) {
if (mixins) {
for (var i = 0; i < mixins.length; i++) {
mixSpecIntoComponent(Constructor, mixins[i]);
}
}
},
childContextTypes: function(Constructor, childContextTypes) {
if ("production" !== "production") {
validateTypeDef(
Constructor,
childContextTypes,
ReactPropTypeLocations.childContext
);
}
Constructor.childContextTypes = assign(
{},
Constructor.childContextTypes,
childContextTypes
);
},
contextTypes: function(Constructor, contextTypes) {
if ("production" !== "production") {
validateTypeDef(
Constructor,
contextTypes,
ReactPropTypeLocations.context
);
}
Constructor.contextTypes = assign(
{},
Constructor.contextTypes,
contextTypes
);
},
/**
* Special case getDefaultProps which should move into statics but requires
* automatic merging.
*/
getDefaultProps: function(Constructor, getDefaultProps) {
if (Constructor.getDefaultProps) {
Constructor.getDefaultProps = createMergedResultFunction(
Constructor.getDefaultProps,
getDefaultProps
);
} else {
Constructor.getDefaultProps = getDefaultProps;
}
},
propTypes: function(Constructor, propTypes) {
if ("production" !== "production") {
validateTypeDef(
Constructor,
propTypes,
ReactPropTypeLocations.prop
);
}
Constructor.propTypes = assign(
{},
Constructor.propTypes,
propTypes
);
},
statics: function(Constructor, statics) {
mixStaticSpecIntoComponent(Constructor, statics);
}
};
function validateTypeDef(Constructor, typeDef, location) {
for (var propName in typeDef) {
if (typeDef.hasOwnProperty(propName)) {
// use a warning instead of an invariant so components
// don't show up in prod but not in __DEV__
("production" !== "production" ? warning(
typeof typeDef[propName] === 'function',
'%s: %s type `%s` is invalid; it must be a function, usually from ' +
'React.PropTypes.',
Constructor.displayName || 'ReactClass',
ReactPropTypeLocationNames[location],
propName
) : null);
}
}
}
function validateMethodOverride(proto, name) {
var specPolicy = ReactClassInterface.hasOwnProperty(name) ?
ReactClassInterface[name] :
null;
// Disallow overriding of base class methods unless explicitly allowed.
if (ReactClassMixin.hasOwnProperty(name)) {
("production" !== "production" ? invariant(
specPolicy === SpecPolicy.OVERRIDE_BASE,
'ReactClassInterface: You are attempting to override ' +
'`%s` from your class specification. Ensure that your method names ' +
'do not overlap with React methods.',
name
) : invariant(specPolicy === SpecPolicy.OVERRIDE_BASE));
}
// Disallow defining methods more than once unless explicitly allowed.
if (proto.hasOwnProperty(name)) {
("production" !== "production" ? invariant(
specPolicy === SpecPolicy.DEFINE_MANY ||
specPolicy === SpecPolicy.DEFINE_MANY_MERGED,
'ReactClassInterface: You are attempting to define ' +
'`%s` on your component more than once. This conflict may be due ' +
'to a mixin.',
name
) : invariant(specPolicy === SpecPolicy.DEFINE_MANY ||
specPolicy === SpecPolicy.DEFINE_MANY_MERGED));
}
}
/**
* Mixin helper which handles policy validation and reserved
 * specification keys when building React classes.
*/
function mixSpecIntoComponent(Constructor, spec) {
if (!spec) {
return;
}
("production" !== "production" ? invariant(
typeof spec !== 'function',
'ReactClass: You\'re attempting to ' +
'use a component class as a mixin. Instead, just use a regular object.'
) : invariant(typeof spec !== 'function'));
("production" !== "production" ? invariant(
!ReactElement.isValidElement(spec),
'ReactClass: You\'re attempting to ' +
'use a component as a mixin. Instead, just use a regular object.'
) : invariant(!ReactElement.isValidElement(spec)));
var proto = Constructor.prototype;
// By handling mixins before any other properties, we ensure the same
// chaining order is applied to methods with DEFINE_MANY policy, whether
// mixins are listed before or after these methods in the spec.
if (spec.hasOwnProperty(MIXINS_KEY)) {
RESERVED_SPEC_KEYS.mixins(Constructor, spec.mixins);
}
for (var name in spec) {
if (!spec.hasOwnProperty(name)) {
continue;
}
if (name === MIXINS_KEY) {
// We have already handled mixins in a special case above
continue;
}
var property = spec[name];
validateMethodOverride(proto, name);
if (RESERVED_SPEC_KEYS.hasOwnProperty(name)) {
RESERVED_SPEC_KEYS[name](Constructor, property);
} else {
// Setup methods on prototype:
// The following member methods should not be automatically bound:
// 1. Expected ReactClass methods (in the "interface").
// 2. Overridden methods (that were mixed in).
var isReactClassMethod =
ReactClassInterface.hasOwnProperty(name);
var isAlreadyDefined = proto.hasOwnProperty(name);
var markedDontBind = property && property.__reactDontBind;
var isFunction = typeof property === 'function';
var shouldAutoBind =
isFunction &&
!isReactClassMethod &&
!isAlreadyDefined &&
!markedDontBind;
if (shouldAutoBind) {
if (!proto.__reactAutoBindMap) {
proto.__reactAutoBindMap = {};
}
proto.__reactAutoBindMap[name] = property;
proto[name] = property;
} else {
if (isAlreadyDefined) {
var specPolicy = ReactClassInterface[name];
// These cases should already be caught by validateMethodOverride
("production" !== "production" ? invariant(
isReactClassMethod && (
(specPolicy === SpecPolicy.DEFINE_MANY_MERGED || specPolicy === SpecPolicy.DEFINE_MANY)
),
'ReactClass: Unexpected spec policy %s for key %s ' +
'when mixing in component specs.',
specPolicy,
name
) : invariant(isReactClassMethod && (
(specPolicy === SpecPolicy.DEFINE_MANY_MERGED || specPolicy === SpecPolicy.DEFINE_MANY)
)));
// For methods which are defined more than once, call the existing
// methods before calling the new property, merging if appropriate.
if (specPolicy === SpecPolicy.DEFINE_MANY_MERGED) {
proto[name] = createMergedResultFunction(proto[name], property);
} else if (specPolicy === SpecPolicy.DEFINE_MANY) {
proto[name] = createChainedFunction(proto[name], property);
}
} else {
proto[name] = property;
if ("production" !== "production") {
// Add verbose displayName to the function, which helps when looking
// at profiling tools.
if (typeof property === 'function' && spec.displayName) {
proto[name].displayName = spec.displayName + '_' + name;
}
}
}
}
}
}
}
function mixStaticSpecIntoComponent(Constructor, statics) {
if (!statics) {
return;
}
for (var name in statics) {
var property = statics[name];
if (!statics.hasOwnProperty(name)) {
continue;
}
var isReserved = name in RESERVED_SPEC_KEYS;
("production" !== "production" ? invariant(
!isReserved,
'ReactClass: You are attempting to define a reserved ' +
'property, `%s`, that shouldn\'t be on the "statics" key. Define it ' +
'as an instance property instead; it will still be accessible on the ' +
'constructor.',
name
) : invariant(!isReserved));
var isInherited = name in Constructor;
("production" !== "production" ? invariant(
!isInherited,
'ReactClass: You are attempting to define ' +
'`%s` on your component more than once. This conflict may be ' +
'due to a mixin.',
name
) : invariant(!isInherited));
Constructor[name] = property;
}
}
/**
* Merge two objects, but throw if both contain the same key.
*
* @param {object} one The first object, which is mutated.
* @param {object} two The second object
* @return {object} one after it has been mutated to contain everything in two.
*/
function mergeIntoWithNoDuplicateKeys(one, two) {
("production" !== "production" ? invariant(
one && two && typeof one === 'object' && typeof two === 'object',
'mergeIntoWithNoDuplicateKeys(): Cannot merge non-objects.'
) : invariant(one && two && typeof one === 'object' && typeof two === 'object'));
for (var key in two) {
if (two.hasOwnProperty(key)) {
("production" !== "production" ? invariant(
one[key] === undefined,
'mergeIntoWithNoDuplicateKeys(): ' +
'Tried to merge two objects with the same key: `%s`. This conflict ' +
'may be due to a mixin; in particular, this may be caused by two ' +
'getInitialState() or getDefaultProps() methods returning objects ' +
'with clashing keys.',
key
) : invariant(one[key] === undefined));
one[key] = two[key];
}
}
return one;
}
/**
* Creates a function that invokes two functions and merges their return values.
*
* @param {function} one Function to invoke first.
* @param {function} two Function to invoke second.
* @return {function} Function that invokes the two argument functions.
* @private
*/
function createMergedResultFunction(one, two) {
return function mergedResult() {
var a = one.apply(this, arguments);
var b = two.apply(this, arguments);
if (a == null) {
return b;
} else if (b == null) {
return a;
}
var c = {};
mergeIntoWithNoDuplicateKeys(c, a);
mergeIntoWithNoDuplicateKeys(c, b);
return c;
};
}
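/**
 * Illustrative example (not part of the React source): under the
 * DEFINE_MANY_MERGED policy, two getInitialState implementations such as
 *
 *   function() { return {a: 1}; }  // from a mixin
 *   function() { return {b: 2}; }  // from the class spec
 *
 * are combined by createMergedResultFunction into one function returning
 * {a: 1, b: 2}. If both returned an `a` key, mergeIntoWithNoDuplicateKeys
 * would throw.
 */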
/**
 * Creates a function that invokes two functions and ignores their return values.
*
* @param {function} one Function to invoke first.
* @param {function} two Function to invoke second.
* @return {function} Function that invokes the two argument functions.
* @private
*/
function createChainedFunction(one, two) {
return function chainedFunction() {
one.apply(this, arguments);
two.apply(this, arguments);
};
}
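/**
 * Illustrative example (not part of the React source): under the DEFINE_MANY
 * policy, lifecycle methods from a mixin and the class spec are chained, so
 * both run (mixin first, because mixins are processed before other keys):
 *
 *   var LogMixin = {componentDidMount: function() { console.log('mixin'); }};
 *   React.createClass({
 *     mixins: [LogMixin],
 *     componentDidMount: function() { console.log('class'); }, // runs second
 *     render: function() { return null; }
 *   });
 */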
/**
* Binds a method to the component.
*
* @param {object} component Component whose method is going to be bound.
* @param {function} method Method to be bound.
* @return {function} The bound method.
*/
function bindAutoBindMethod(component, method) {
var boundMethod = method.bind(component);
if ("production" !== "production") {
boundMethod.__reactBoundContext = component;
boundMethod.__reactBoundMethod = method;
boundMethod.__reactBoundArguments = null;
var componentName = component.constructor.displayName;
var _bind = boundMethod.bind;
/* eslint-disable block-scoped-var, no-undef */
boundMethod.bind = function(newThis ) {for (var args=[],$__0=1,$__1=arguments.length;$__0<$__1;$__0++) args.push(arguments[$__0]);
// User is trying to bind() an autobound method; we effectively will
// ignore the value of "this" that the user is trying to use, so
// let's warn.
if (newThis !== component && newThis !== null) {
("production" !== "production" ? warning(
false,
'bind(): React component methods may only be bound to the ' +
'component instance. See %s',
componentName
) : null);
} else if (!args.length) {
("production" !== "production" ? warning(
false,
'bind(): You are binding a component method to the component. ' +
'React does this for you automatically in a high-performance ' +
'way, so you can safely remove this call. See %s',
componentName
) : null);
return boundMethod;
}
var reboundMethod = _bind.apply(boundMethod, arguments);
reboundMethod.__reactBoundContext = component;
reboundMethod.__reactBoundMethod = method;
reboundMethod.__reactBoundArguments = args;
return reboundMethod;
/* eslint-enable */
};
}
return boundMethod;
}
/**
* Binds all auto-bound methods in a component.
*
* @param {object} component Component whose method is going to be bound.
*/
function bindAutoBindMethods(component) {
for (var autoBindKey in component.__reactAutoBindMap) {
if (component.__reactAutoBindMap.hasOwnProperty(autoBindKey)) {
var method = component.__reactAutoBindMap[autoBindKey];
component[autoBindKey] = bindAutoBindMethod(
component,
ReactErrorUtils.guard(
method,
component.constructor.displayName + '.' + autoBindKey
)
);
}
}
}
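/**
 * Illustrative consequence (not part of the React source): because spec
 * methods are collected into __reactAutoBindMap and bound here, handlers can
 * be passed around without an explicit .bind(this):
 *
 *   var Counter = React.createClass({
 *     getInitialState: function() { return {n: 0}; },
 *     handleClick: function() { this.setState({n: this.state.n + 1}); },
 *     render: function() {
 *       // `this.handleClick` is already bound to the instance.
 *       return React.createElement(
 *         'button', {onClick: this.handleClick}, this.state.n
 *       );
 *     }
 *   });
 */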
var typeDeprecationDescriptor = {
enumerable: false,
get: function() {
var displayName = this.displayName || this.name || 'Component';
("production" !== "production" ? warning(
false,
'%s.type is deprecated. Use %s directly to access the class.',
displayName,
displayName
) : null);
Object.defineProperty(this, 'type', {
value: this
});
return this;
}
};
/**
* Add more to the ReactClass base class. These are all legacy features and
* therefore not already part of the modern ReactComponent.
*/
var ReactClassMixin = {
/**
* TODO: This will be deprecated because state should always keep a consistent
   * type signature and the only use case for this is to avoid that.
*/
replaceState: function(newState, callback) {
ReactUpdateQueue.enqueueReplaceState(this, newState);
if (callback) {
ReactUpdateQueue.enqueueCallback(this, callback);
}
},
/**
* Checks whether or not this composite component is mounted.
* @return {boolean} True if mounted, false otherwise.
* @protected
* @final
*/
isMounted: function() {
if ("production" !== "production") {
var owner = ReactCurrentOwner.current;
if (owner !== null) {
("production" !== "production" ? warning(
owner._warnedAboutRefsInRender,
'%s is accessing isMounted inside its render() function. ' +
'render() should be a pure function of props and state. It should ' +
'never access something that requires stale data from the previous ' +
'render, such as refs. Move this logic to componentDidMount and ' +
'componentDidUpdate instead.',
owner.getName() || 'A component'
) : null);
owner._warnedAboutRefsInRender = true;
}
}
var internalInstance = ReactInstanceMap.get(this);
return (
internalInstance &&
internalInstance !== ReactLifeCycle.currentlyMountingInstance
);
},
/**
* Sets a subset of the props.
*
* @param {object} partialProps Subset of the next props.
* @param {?function} callback Called after props are updated.
* @final
* @public
* @deprecated
*/
setProps: function(partialProps, callback) {
ReactUpdateQueue.enqueueSetProps(this, partialProps);
if (callback) {
ReactUpdateQueue.enqueueCallback(this, callback);
}
},
/**
* Replace all the props.
*
   * @param {object} newProps The next props, replacing all current props.
* @param {?function} callback Called after props are updated.
* @final
* @public
* @deprecated
*/
replaceProps: function(newProps, callback) {
ReactUpdateQueue.enqueueReplaceProps(this, newProps);
if (callback) {
ReactUpdateQueue.enqueueCallback(this, callback);
}
}
};
var ReactClassComponent = function() {};
assign(
ReactClassComponent.prototype,
ReactComponent.prototype,
ReactClassMixin
);
/**
* Module for creating composite components.
*
* @class ReactClass
*/
var ReactClass = {
/**
* Creates a composite component class given a class specification.
*
* @param {object} spec Class specification (which must define `render`).
* @return {function} Component constructor function.
* @public
*/
createClass: function(spec) {
var Constructor = function(props, context) {
// This constructor is overridden by mocks. The argument is used
// by mocks to assert on what gets mounted.
if ("production" !== "production") {
("production" !== "production" ? warning(
this instanceof Constructor,
'Something is calling a React component directly. Use a factory or ' +
'JSX instead. See: https://fb.me/react-legacyfactory'
) : null);
}
// Wire up auto-binding
if (this.__reactAutoBindMap) {
bindAutoBindMethods(this);
}
this.props = props;
this.context = context;
this.state = null;
      // ReactClasses don't have constructors. Instead, they use the
// getInitialState and componentWillMount methods for initialization.
var initialState = this.getInitialState ? this.getInitialState() : null;
if ("production" !== "production") {
// We allow auto-mocks to proceed as if they're returning null.
if (typeof initialState === 'undefined' &&
this.getInitialState._isMockFunction) {
// This is probably bad practice. Consider warning here and
// deprecating this convenience.
initialState = null;
}
}
("production" !== "production" ? invariant(
typeof initialState === 'object' && !Array.isArray(initialState),
'%s.getInitialState(): must return an object or null',
Constructor.displayName || 'ReactCompositeComponent'
) : invariant(typeof initialState === 'object' && !Array.isArray(initialState)));
this.state = initialState;
};
Constructor.prototype = new ReactClassComponent();
Constructor.prototype.constructor = Constructor;
injectedMixins.forEach(
mixSpecIntoComponent.bind(null, Constructor)
);
mixSpecIntoComponent(Constructor, spec);
// Initialize the defaultProps property after all mixins have been merged
if (Constructor.getDefaultProps) {
Constructor.defaultProps = Constructor.getDefaultProps();
}
if ("production" !== "production") {
// This is a tag to indicate that the use of these method names is ok,
// since it's used with createClass. If it's not, then it's likely a
// mistake so we'll warn you to use the static property, property
// initializer or constructor respectively.
if (Constructor.getDefaultProps) {
Constructor.getDefaultProps.isReactClassApproved = {};
}
if (Constructor.prototype.getInitialState) {
Constructor.prototype.getInitialState.isReactClassApproved = {};
}
}
("production" !== "production" ? invariant(
Constructor.prototype.render,
'createClass(...): Class specification must implement a `render` method.'
) : invariant(Constructor.prototype.render));
if ("production" !== "production") {
("production" !== "production" ? warning(
!Constructor.prototype.componentShouldUpdate,
'%s has a method called ' +
'componentShouldUpdate(). Did you mean shouldComponentUpdate()? ' +
'The name is phrased as a question because the function is ' +
'expected to return a value.',
spec.displayName || 'A component'
) : null);
}
// Reduce time spent doing lookups by setting these on the prototype.
for (var methodName in ReactClassInterface) {
if (!Constructor.prototype[methodName]) {
Constructor.prototype[methodName] = null;
}
}
// Legacy hook
Constructor.type = Constructor;
if ("production" !== "production") {
try {
Object.defineProperty(Constructor, 'type', typeDeprecationDescriptor);
} catch (x) {
// IE will fail on defineProperty (es5-shim/sham too)
}
}
return Constructor;
},
injection: {
injectMixin: function(mixin) {
injectedMixins.push(mixin);
}
}
};
module.exports = ReactClass;
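/**
 * Illustrative usage (not part of the React source): after createClass
 * returns, getDefaultProps has already been evaluated once and cached on the
 * constructor:
 *
 *   var Greeting = React.createClass({
 *     getDefaultProps: function() { return {name: 'world'}; },
 *     render: function() {
 *       return React.createElement('span', null, 'Hello, ' + this.props.name);
 *     }
 *   });
 *   Greeting.defaultProps; // {name: 'world'} -- computed once, at class time
 */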
},{"./Object.assign":33,"./ReactComponent":43,"./ReactCurrentOwner":49,"./ReactElement":67,"./ReactErrorUtils":70,"./ReactInstanceMap":77,"./ReactLifeCycle":78,"./ReactPropTypeLocationNames":88,"./ReactPropTypeLocations":89,"./ReactUpdateQueue":103,"./invariant":155,"./keyMirror":161,"./keyOf":162,"./warning":176}],43:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactComponent
*/
'use strict';
var ReactUpdateQueue = require("./ReactUpdateQueue");
var invariant = require("./invariant");
var warning = require("./warning");
/**
* Base class helpers for the updating state of a component.
*/
function ReactComponent(props, context) {
this.props = props;
this.context = context;
}
/**
* Sets a subset of the state. Always use this to mutate
* state. You should treat `this.state` as immutable.
*
* There is no guarantee that `this.state` will be immediately updated, so
* accessing `this.state` after calling this method may return the old value.
*
* There is no guarantee that calls to `setState` will run synchronously,
* as they may eventually be batched together. You can provide an optional
* callback that will be executed when the call to setState is actually
* completed.
*
* When a function is provided to setState, it will be called at some point in
   * the future (not synchronously). It will be called with the up-to-date
* component arguments (state, props, context). These values can be different
* from this.* because your function may be called after receiveProps but before
* shouldComponentUpdate, and this new state, props, and context will not yet be
* assigned to this.
*
* @param {object|function} partialState Next partial state or function to
* produce next partial state to be merged with current state.
* @param {?function} callback Called after state is updated.
* @final
* @protected
*/
ReactComponent.prototype.setState = function(partialState, callback) {
("production" !== "production" ? invariant(
typeof partialState === 'object' ||
typeof partialState === 'function' ||
partialState == null,
'setState(...): takes an object of state variables to update or a ' +
'function which returns an object of state variables.'
) : invariant(typeof partialState === 'object' ||
typeof partialState === 'function' ||
partialState == null));
if ("production" !== "production") {
("production" !== "production" ? warning(
partialState != null,
'setState(...): You passed an undefined or null state object; ' +
'instead, use forceUpdate().'
) : null);
}
ReactUpdateQueue.enqueueSetState(this, partialState);
if (callback) {
ReactUpdateQueue.enqueueCallback(this, callback);
}
};
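/**
 * Illustrative usage (not part of the React source): because updates may be
 * batched, state derived from previous state should use the function form
 * rather than reading this.state directly:
 *
 *   this.setState(function(state, props) {
 *     return {count: state.count + 1};
 *   }, function() {
 *     // runs after the state update has been applied
 *   });
 */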
/**
* Forces an update. This should only be invoked when it is known with
* certainty that we are **not** in a DOM transaction.
*
* You may want to call this when you know that some deeper aspect of the
* component's state has changed but `setState` was not called.
*
* This will not invoke `shouldComponentUpdate`, but it will invoke
* `componentWillUpdate` and `componentDidUpdate`.
*
* @param {?function} callback Called after update is complete.
* @final
* @protected
*/
ReactComponent.prototype.forceUpdate = function(callback) {
ReactUpdateQueue.enqueueForceUpdate(this);
if (callback) {
ReactUpdateQueue.enqueueCallback(this, callback);
}
};
/**
* Deprecated APIs. These APIs used to exist on classic React classes but since
* we would like to deprecate them, we're not going to move them over to this
* modern base class. Instead, we define a getter that warns if it's accessed.
*/
if ("production" !== "production") {
var deprecatedAPIs = {
getDOMNode: [
'getDOMNode',
'Use React.findDOMNode(component) instead.'
],
isMounted: [
'isMounted',
'Instead, make sure to clean up subscriptions and pending requests in ' +
'componentWillUnmount to prevent memory leaks.'
],
replaceProps: [
'replaceProps',
'Instead, call React.render again at the top level.'
],
replaceState: [
'replaceState',
'Refactor your code to use setState instead (see ' +
'https://github.com/facebook/react/issues/3236).'
],
setProps: [
'setProps',
'Instead, call React.render again at the top level.'
]
};
var defineDeprecationWarning = function(methodName, info) {
try {
Object.defineProperty(ReactComponent.prototype, methodName, {
get: function() {
("production" !== "production" ? warning(
false,
'%s(...) is deprecated in plain JavaScript React classes. %s',
info[0],
info[1]
) : null);
return undefined;
}
});
} catch (x) {
// IE will fail on defineProperty (es5-shim/sham too)
}
};
for (var fnName in deprecatedAPIs) {
if (deprecatedAPIs.hasOwnProperty(fnName)) {
defineDeprecationWarning(fnName, deprecatedAPIs[fnName]);
}
}
}
module.exports = ReactComponent;
},{"./ReactUpdateQueue":103,"./invariant":155,"./warning":176}],44:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactComponentBrowserEnvironment
*/
/*jslint evil: true */
'use strict';
var ReactDOMIDOperations = require("./ReactDOMIDOperations");
var ReactMount = require("./ReactMount");
/**
* Abstracts away all functionality of the reconciler that requires knowledge of
* the browser context. TODO: These callers should be refactored to avoid the
* need for this injection.
*/
var ReactComponentBrowserEnvironment = {
processChildrenUpdates:
ReactDOMIDOperations.dangerouslyProcessChildrenUpdates,
replaceNodeWithMarkupByID:
ReactDOMIDOperations.dangerouslyReplaceNodeWithMarkupByID,
/**
* If a particular environment requires that some resources be cleaned up,
* specify this in the injected Mixin. In the DOM, we would likely want to
* purge any cached node ID lookups.
*
* @private
*/
unmountIDFromEnvironment: function(rootNodeID) {
ReactMount.purgeID(rootNodeID);
}
};
module.exports = ReactComponentBrowserEnvironment;
},{"./ReactDOMIDOperations":54,"./ReactMount":81}],45:[function(require,module,exports){
/**
* Copyright 2014-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactComponentEnvironment
*/
'use strict';
var invariant = require("./invariant");
var injected = false;
var ReactComponentEnvironment = {
/**
* Optionally injectable environment dependent cleanup hook. (server vs.
* browser etc). Example: A browser system caches DOM nodes based on component
* ID and must remove that cache entry when this instance is unmounted.
*/
unmountIDFromEnvironment: null,
/**
* Optionally injectable hook for swapping out mount images in the middle of
* the tree.
*/
replaceNodeWithMarkupByID: null,
/**
* Optionally injectable hook for processing a queue of child updates. Will
* later move into MultiChildComponents.
*/
processChildrenUpdates: null,
injection: {
injectEnvironment: function(environment) {
("production" !== "production" ? invariant(
!injected,
'ReactCompositeComponent: injectEnvironment() can only be called once.'
) : invariant(!injected));
ReactComponentEnvironment.unmountIDFromEnvironment =
environment.unmountIDFromEnvironment;
ReactComponentEnvironment.replaceNodeWithMarkupByID =
environment.replaceNodeWithMarkupByID;
ReactComponentEnvironment.processChildrenUpdates =
environment.processChildrenUpdates;
injected = true;
}
}
};
module.exports = ReactComponentEnvironment;
},{"./invariant":155}],46:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactComponentWithPureRenderMixin
*/
'use strict';
var shallowEqual = require("./shallowEqual");
/**
 * If your React component's render function is "pure", i.e. it will render the
* same result given the same props and state, provide this Mixin for a
* considerable performance boost.
*
* Most React components have pure render functions.
*
* Example:
*
* var ReactComponentWithPureRenderMixin =
* require('ReactComponentWithPureRenderMixin');
* React.createClass({
* mixins: [ReactComponentWithPureRenderMixin],
*
* render: function() {
* return <div className={this.props.className}>foo</div>;
* }
* });
*
* Note: This only checks shallow equality for props and state. If these contain
* complex data structures this mixin may have false-negatives for deeper
* differences. Only mixin to components which have simple props and state, or
* use `forceUpdate()` when you know deep data structures have changed.
*/
var ReactComponentWithPureRenderMixin = {
shouldComponentUpdate: function(nextProps, nextState) {
return !shallowEqual(this.props, nextProps) ||
!shallowEqual(this.state, nextState);
}
};
module.exports = ReactComponentWithPureRenderMixin;
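// Illustrative sketch (not part of the original source): the comparison is
// shallow, so recreating equal-but-not-identical props on every parent render
// defeats the mixin, while in-place mutation of nested data is missed:
//
//   // re-renders every time: a fresh array is a new reference
//   React.createElement(PureChild, {items: ['a', 'b']});
//
//   // missed update: pushing onto the same array keeps the same reference,
//   // so shallowEqual reports "equal" and the re-render is skipped
//   this.props.items.push('c');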
},{"./shallowEqual":171}],47:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactCompositeComponent
*/
'use strict';
var ReactComponentEnvironment = require("./ReactComponentEnvironment");
var ReactContext = require("./ReactContext");
var ReactCurrentOwner = require("./ReactCurrentOwner");
var ReactElement = require("./ReactElement");
var ReactElementValidator = require("./ReactElementValidator");
var ReactInstanceMap = require("./ReactInstanceMap");
var ReactLifeCycle = require("./ReactLifeCycle");
var ReactNativeComponent = require("./ReactNativeComponent");
var ReactPerf = require("./ReactPerf");
var ReactPropTypeLocations = require("./ReactPropTypeLocations");
var ReactPropTypeLocationNames = require("./ReactPropTypeLocationNames");
var ReactReconciler = require("./ReactReconciler");
var ReactUpdates = require("./ReactUpdates");
var assign = require("./Object.assign");
var emptyObject = require("./emptyObject");
var invariant = require("./invariant");
var shouldUpdateReactComponent = require("./shouldUpdateReactComponent");
var warning = require("./warning");
function getDeclarationErrorAddendum(component) {
var owner = component._currentElement._owner || null;
if (owner) {
var name = owner.getName();
if (name) {
return ' Check the render method of `' + name + '`.';
}
}
return '';
}
/**
* ------------------ The Life-Cycle of a Composite Component ------------------
*
* - constructor: Initialization of state. The instance is now retained.
* - componentWillMount
* - render
* - [children's constructors]
* - [children's componentWillMount and render]
* - [children's componentDidMount]
* - componentDidMount
*
* Update Phases:
* - componentWillReceiveProps (only called if parent updated)
* - shouldComponentUpdate
* - componentWillUpdate
* - render
* - [children's constructors or receive props phases]
* - componentDidUpdate
*
* - componentWillUnmount
* - [children's componentWillUnmount]
* - [children destroyed]
* - (destroyed): The instance is now blank, released by React and ready for GC.
*
* -----------------------------------------------------------------------------
*/
/**
* An incrementing ID assigned to each component when it is mounted. This is
* used to enforce the order in which `ReactUpdates` updates dirty components.
*
* @private
*/
var nextMountID = 1;
/**
* @lends {ReactCompositeComponent.prototype}
*/
var ReactCompositeComponentMixin = {
/**
   * Base constructor for all composite components.
*
* @param {ReactElement} element
* @final
* @internal
*/
construct: function(element) {
this._currentElement = element;
this._rootNodeID = null;
this._instance = null;
// See ReactUpdateQueue
this._pendingElement = null;
this._pendingStateQueue = null;
this._pendingReplaceState = false;
this._pendingForceUpdate = false;
this._renderedComponent = null;
this._context = null;
this._mountOrder = 0;
this._isTopLevel = false;
// See ReactUpdates and ReactUpdateQueue.
this._pendingCallbacks = null;
},
/**
* Initializes the component, renders markup, and registers event listeners.
*
* @param {string} rootID DOM ID of the root node.
* @param {ReactReconcileTransaction|ReactServerRenderingTransaction} transaction
* @return {?string} Rendered markup to be inserted into the DOM.
* @final
* @internal
*/
mountComponent: function(rootID, transaction, context) {
this._context = context;
this._mountOrder = nextMountID++;
this._rootNodeID = rootID;
var publicProps = this._processProps(this._currentElement.props);
var publicContext = this._processContext(this._currentElement._context);
var Component = ReactNativeComponent.getComponentClassForElement(
this._currentElement
);
// Initialize the public class
var inst = new Component(publicProps, publicContext);
if ("production" !== "production") {
// This will throw later in _renderValidatedComponent, but add an early
// warning now to help debugging
("production" !== "production" ? warning(
inst.render != null,
'%s(...): No `render` method found on the returned component ' +
'instance: you may have forgotten to define `render` in your ' +
'component or you may have accidentally tried to render an element ' +
'whose type is a function that isn\'t a React component.',
Component.displayName || Component.name || 'Component'
) : null);
}
// These should be set up in the constructor, but as a convenience for
// simpler class abstractions, we set them up after the fact.
inst.props = publicProps;
inst.context = publicContext;
inst.refs = emptyObject;
this._instance = inst;
// Store a reference from the instance back to the internal representation
ReactInstanceMap.set(inst, this);
if ("production" !== "production") {
this._warnIfContextsDiffer(this._currentElement._context, context);
}
if ("production" !== "production") {
// Since plain JS classes are defined without any special initialization
// logic, we can not catch common errors early. Therefore, we have to
// catch them here, at initialization time, instead.
("production" !== "production" ? warning(
!inst.getInitialState ||
inst.getInitialState.isReactClassApproved,
'getInitialState was defined on %s, a plain JavaScript class. ' +
'This is only supported for classes created using React.createClass. ' +
'Did you mean to define a state property instead?',
this.getName() || 'a component'
) : null);
("production" !== "production" ? warning(
!inst.getDefaultProps ||
inst.getDefaultProps.isReactClassApproved,
'getDefaultProps was defined on %s, a plain JavaScript class. ' +
'This is only supported for classes created using React.createClass. ' +
'Use a static property to define defaultProps instead.',
this.getName() || 'a component'
) : null);
("production" !== "production" ? warning(
!inst.propTypes,
'propTypes was defined as an instance property on %s. Use a static ' +
'property to define propTypes instead.',
this.getName() || 'a component'
) : null);
("production" !== "production" ? warning(
!inst.contextTypes,
'contextTypes was defined as an instance property on %s. Use a ' +
'static property to define contextTypes instead.',
this.getName() || 'a component'
) : null);
("production" !== "production" ? warning(
typeof inst.componentShouldUpdate !== 'function',
'%s has a method called ' +
'componentShouldUpdate(). Did you mean shouldComponentUpdate()? ' +
'The name is phrased as a question because the function is ' +
'expected to return a value.',
(this.getName() || 'A component')
) : null);
}
var initialState = inst.state;
if (initialState === undefined) {
inst.state = initialState = null;
}
("production" !== "production" ? invariant(
typeof initialState === 'object' && !Array.isArray(initialState),
'%s.state: must be set to an object or null',
this.getName() || 'ReactCompositeComponent'
) : invariant(typeof initialState === 'object' && !Array.isArray(initialState)));
this._pendingStateQueue = null;
this._pendingReplaceState = false;
this._pendingForceUpdate = false;
var childContext;
var renderedElement;
var previouslyMounting = ReactLifeCycle.currentlyMountingInstance;
ReactLifeCycle.currentlyMountingInstance = this;
try {
if (inst.componentWillMount) {
inst.componentWillMount();
// When mounting, calls to `setState` by `componentWillMount` will set
// `this._pendingStateQueue` without triggering a re-render.
if (this._pendingStateQueue) {
inst.state = this._processPendingState(inst.props, inst.context);
}
}
childContext = this._getValidatedChildContext(context);
renderedElement = this._renderValidatedComponent(childContext);
} finally {
ReactLifeCycle.currentlyMountingInstance = previouslyMounting;
}
this._renderedComponent = this._instantiateReactComponent(
renderedElement,
this._currentElement.type // The wrapping type
);
var markup = ReactReconciler.mountComponent(
this._renderedComponent,
rootID,
transaction,
this._mergeChildContext(context, childContext)
);
if (inst.componentDidMount) {
transaction.getReactMountReady().enqueue(inst.componentDidMount, inst);
}
return markup;
},
/**
* Releases any resources allocated by `mountComponent`.
*
* @final
* @internal
*/
unmountComponent: function() {
var inst = this._instance;
if (inst.componentWillUnmount) {
var previouslyUnmounting = ReactLifeCycle.currentlyUnmountingInstance;
ReactLifeCycle.currentlyUnmountingInstance = this;
try {
inst.componentWillUnmount();
} finally {
ReactLifeCycle.currentlyUnmountingInstance = previouslyUnmounting;
}
}
ReactReconciler.unmountComponent(this._renderedComponent);
this._renderedComponent = null;
// Reset pending fields
this._pendingStateQueue = null;
this._pendingReplaceState = false;
this._pendingForceUpdate = false;
this._pendingCallbacks = null;
this._pendingElement = null;
// These fields do not really need to be reset since this object is no
// longer accessible.
this._context = null;
this._rootNodeID = null;
// Delete the reference from the instance to this internal representation
    // which allows the internals to be properly cleaned up even if the user
// leaks a reference to the public instance.
ReactInstanceMap.remove(inst);
// Some existing components rely on inst.props even after they've been
// destroyed (in event handlers).
// TODO: inst.props = null;
// TODO: inst.state = null;
// TODO: inst.context = null;
},
/**
* Schedule a partial update to the props. Only used for internal testing.
*
* @param {object} partialProps Subset of the next props.
* @param {?function} callback Called after props are updated.
* @final
* @internal
*/
_setPropsInternal: function(partialProps, callback) {
// This is a deoptimized path. We optimize for always having an element.
// This creates an extra internal element.
var element = this._pendingElement || this._currentElement;
this._pendingElement = ReactElement.cloneAndReplaceProps(
element,
assign({}, element.props, partialProps)
);
ReactUpdates.enqueueUpdate(this, callback);
},
/**
* Filters the context object to only contain keys specified in
* `contextTypes`
*
* @param {object} context
* @return {?object}
* @private
*/
_maskContext: function(context) {
var maskedContext = null;
// This really should be getting the component class for the element,
// but we know that we're not going to need it for built-ins.
if (typeof this._currentElement.type === 'string') {
return emptyObject;
}
var contextTypes = this._currentElement.type.contextTypes;
if (!contextTypes) {
return emptyObject;
}
maskedContext = {};
for (var contextName in contextTypes) {
maskedContext[contextName] = context[contextName];
}
return maskedContext;
},
/**
* Filters the context object to only contain keys specified in
* `contextTypes`, and asserts that they are valid.
*
* @param {object} context
* @return {?object}
* @private
*/
_processContext: function(context) {
var maskedContext = this._maskContext(context);
if ("production" !== "production") {
var Component = ReactNativeComponent.getComponentClassForElement(
this._currentElement
);
if (Component.contextTypes) {
this._checkPropTypes(
Component.contextTypes,
maskedContext,
ReactPropTypeLocations.context
);
}
}
return maskedContext;
},
/**
* @param {object} currentContext
* @return {object}
* @private
*/
_getValidatedChildContext: function(currentContext) {
var inst = this._instance;
var childContext = inst.getChildContext && inst.getChildContext();
if (childContext) {
("production" !== "production" ? invariant(
typeof inst.constructor.childContextTypes === 'object',
'%s.getChildContext(): childContextTypes must be defined in order to ' +
'use getChildContext().',
this.getName() || 'ReactCompositeComponent'
) : invariant(typeof inst.constructor.childContextTypes === 'object'));
if ("production" !== "production") {
this._checkPropTypes(
inst.constructor.childContextTypes,
childContext,
ReactPropTypeLocations.childContext
);
}
for (var name in childContext) {
("production" !== "production" ? invariant(
name in inst.constructor.childContextTypes,
'%s.getChildContext(): key "%s" is not defined in childContextTypes.',
this.getName() || 'ReactCompositeComponent',
name
) : invariant(name in inst.constructor.childContextTypes));
}
return childContext;
}
return null;
},
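  // Illustrative sketch (not part of the original source): a component that
  // supplies context must declare a matching static `childContextTypes`, or
  // the invariants above fire:
  //
  //   var Provider = React.createClass({
  //     childContextTypes: {color: React.PropTypes.string},
  //     getChildContext: function() {
  //       return {color: 'purple'}; // every key must appear in childContextTypes
  //     },
  //     render: function() {
  //       return React.createElement('div', null, this.props.children);
  //     }
  //   });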
_mergeChildContext: function(currentContext, childContext) {
if (childContext) {
return assign({}, currentContext, childContext);
}
return currentContext;
},
/**
* Processes props by setting default values for unspecified props and
* asserting that the props are valid. Does not mutate its argument; returns
* a new props object with defaults merged in.
*
* @param {object} newProps
* @return {object}
* @private
*/
_processProps: function(newProps) {
if ("production" !== "production") {
var Component = ReactNativeComponent.getComponentClassForElement(
this._currentElement
);
if (Component.propTypes) {
this._checkPropTypes(
Component.propTypes,
newProps,
ReactPropTypeLocations.prop
);
}
}
return newProps;
},
/**
* Assert that the props are valid
*
* @param {object} propTypes Map of prop name to a ReactPropType
* @param {object} props
* @param {string} location e.g. "prop", "context", "child context"
* @private
*/
_checkPropTypes: function(propTypes, props, location) {
// TODO: Stop validating prop types here and only use the element
// validation.
var componentName = this.getName();
for (var propName in propTypes) {
if (propTypes.hasOwnProperty(propName)) {
var error;
try {
// This is intentionally an invariant that gets caught. It's the same
// behavior as without this statement except with a better message.
("production" !== "production" ? invariant(
typeof propTypes[propName] === 'function',
'%s: %s type `%s` is invalid; it must be a function, usually ' +
'from React.PropTypes.',
componentName || 'React class',
ReactPropTypeLocationNames[location],
propName
) : invariant(typeof propTypes[propName] === 'function'));
error = propTypes[propName](props, propName, componentName, location);
} catch (ex) {
error = ex;
}
if (error instanceof Error) {
// We may want to extend this logic for similar errors in
// React.render calls, so I'm abstracting it away into
// a function to minimize refactoring in the future
var addendum = getDeclarationErrorAddendum(this);
if (location === ReactPropTypeLocations.prop) {
// Preface gives us something to blacklist in warning module
("production" !== "production" ? warning(
false,
'Failed Composite propType: %s%s',
error.message,
addendum
) : null);
} else {
("production" !== "production" ? warning(
false,
'Failed Context Types: %s%s',
error.message,
addendum
) : null);
}
}
}
}
},
receiveComponent: function(nextElement, transaction, nextContext) {
var prevElement = this._currentElement;
var prevContext = this._context;
this._pendingElement = null;
this.updateComponent(
transaction,
prevElement,
nextElement,
prevContext,
nextContext
);
},
/**
* If any of `_pendingElement`, `_pendingStateQueue`, or `_pendingForceUpdate`
* is set, update the component.
*
* @param {ReactReconcileTransaction} transaction
* @internal
*/
performUpdateIfNecessary: function(transaction) {
if (this._pendingElement != null) {
ReactReconciler.receiveComponent(
this,
this._pendingElement || this._currentElement,
transaction,
this._context
);
}
if (this._pendingStateQueue !== null || this._pendingForceUpdate) {
if ("production" !== "production") {
ReactElementValidator.checkAndWarnForMutatedProps(
this._currentElement
);
}
this.updateComponent(
transaction,
this._currentElement,
this._currentElement,
this._context,
this._context
);
}
},
/**
* Compare two contexts, warning if they are different
* TODO: Remove this check when owner-context is removed
*/
_warnIfContextsDiffer: function(ownerBasedContext, parentBasedContext) {
ownerBasedContext = this._maskContext(ownerBasedContext);
parentBasedContext = this._maskContext(parentBasedContext);
var parentKeys = Object.keys(parentBasedContext).sort();
var displayName = this.getName() || 'ReactCompositeComponent';
for (var i = 0; i < parentKeys.length; i++) {
var key = parentKeys[i];
("production" !== "production" ? warning(
ownerBasedContext[key] === parentBasedContext[key],
'owner-based and parent-based contexts differ ' +
'(values: `%s` vs `%s`) for key (%s) while mounting %s ' +
'(see: http://fb.me/react-context-by-parent)',
ownerBasedContext[key],
parentBasedContext[key],
key,
displayName
) : null);
}
},
/**
* Perform an update to a mounted component. The componentWillReceiveProps and
* shouldComponentUpdate methods are called, then (assuming the update isn't
* skipped) the remaining update lifecycle methods are called and the DOM
* representation is updated.
*
* By default, this implements React's rendering and reconciliation algorithm.
* Sophisticated clients may wish to override this.
*
* @param {ReactReconcileTransaction} transaction
* @param {ReactElement} prevParentElement
* @param {ReactElement} nextParentElement
* @internal
* @overridable
*/
updateComponent: function(
transaction,
prevParentElement,
nextParentElement,
prevUnmaskedContext,
nextUnmaskedContext
) {
var inst = this._instance;
var nextContext = inst.context;
var nextProps = inst.props;
// Distinguish between a props update versus a simple state update
if (prevParentElement !== nextParentElement) {
nextContext = this._processContext(nextParentElement._context);
nextProps = this._processProps(nextParentElement.props);
if ("production" !== "production") {
if (nextUnmaskedContext != null) {
this._warnIfContextsDiffer(
nextParentElement._context,
nextUnmaskedContext
);
}
}
// An update here will schedule an update but immediately set
    // _pendingStateQueue which will ensure that any state updates get
// immediately reconciled instead of waiting for the next batch.
if (inst.componentWillReceiveProps) {
inst.componentWillReceiveProps(nextProps, nextContext);
}
}
var nextState = this._processPendingState(nextProps, nextContext);
var shouldUpdate =
this._pendingForceUpdate ||
!inst.shouldComponentUpdate ||
inst.shouldComponentUpdate(nextProps, nextState, nextContext);
if ("production" !== "production") {
("production" !== "production" ? warning(
typeof shouldUpdate !== 'undefined',
'%s.shouldComponentUpdate(): Returned undefined instead of a ' +
'boolean value. Make sure to return true or false.',
this.getName() || 'ReactCompositeComponent'
) : null);
}
if (shouldUpdate) {
this._pendingForceUpdate = false;
// Will set `this.props`, `this.state` and `this.context`.
this._performComponentUpdate(
nextParentElement,
nextProps,
nextState,
nextContext,
transaction,
nextUnmaskedContext
);
} else {
// If it's determined that a component should not update, we still want
// to set props and state but we shortcut the rest of the update.
this._currentElement = nextParentElement;
this._context = nextUnmaskedContext;
inst.props = nextProps;
inst.state = nextState;
inst.context = nextContext;
}
},
_processPendingState: function(props, context) {
var inst = this._instance;
var queue = this._pendingStateQueue;
var replace = this._pendingReplaceState;
this._pendingReplaceState = false;
this._pendingStateQueue = null;
if (!queue) {
return inst.state;
}
if (replace && queue.length === 1) {
return queue[0];
}
var nextState = assign({}, replace ? queue[0] : inst.state);
for (var i = replace ? 1 : 0; i < queue.length; i++) {
var partial = queue[i];
assign(
nextState,
typeof partial === 'function' ?
partial.call(inst, nextState, props, context) :
partial
);
}
return nextState;
},
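  // Illustrative sketch (not part of the original source): queued partial
  // states merge left-to-right, and function entries receive the state
  // accumulated so far:
  //
  //   // inst.state = {a: 0}
  //   // queue = [{a: 1}, function(s) { return {b: s.a + 1}; }]
  //   // assign({}, {a: 0})  -> {a: 0}
  //   // merge {a: 1}        -> {a: 1}
  //   // merge f({a: 1},...) -> {a: 1, b: 2}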
/**
* Merges new props and state, notifies delegate methods of update and
* performs update.
*
* @param {ReactElement} nextElement Next element
* @param {object} nextProps Next public object to set as properties.
* @param {?object} nextState Next object to set as state.
* @param {?object} nextContext Next public object to set as context.
* @param {ReactReconcileTransaction} transaction
* @param {?object} unmaskedContext
* @private
*/
_performComponentUpdate: function(
nextElement,
nextProps,
nextState,
nextContext,
transaction,
unmaskedContext
) {
var inst = this._instance;
var prevProps = inst.props;
var prevState = inst.state;
var prevContext = inst.context;
if (inst.componentWillUpdate) {
inst.componentWillUpdate(nextProps, nextState, nextContext);
}
this._currentElement = nextElement;
this._context = unmaskedContext;
inst.props = nextProps;
inst.state = nextState;
inst.context = nextContext;
this._updateRenderedComponent(transaction, unmaskedContext);
if (inst.componentDidUpdate) {
transaction.getReactMountReady().enqueue(
inst.componentDidUpdate.bind(inst, prevProps, prevState, prevContext),
inst
);
}
},
/**
* Call the component's `render` method and update the DOM accordingly.
*
* @param {ReactReconcileTransaction} transaction
* @internal
*/
_updateRenderedComponent: function(transaction, context) {
var prevComponentInstance = this._renderedComponent;
var prevRenderedElement = prevComponentInstance._currentElement;
var childContext = this._getValidatedChildContext();
var nextRenderedElement = this._renderValidatedComponent(childContext);
if (shouldUpdateReactComponent(prevRenderedElement, nextRenderedElement)) {
ReactReconciler.receiveComponent(
prevComponentInstance,
nextRenderedElement,
transaction,
this._mergeChildContext(context, childContext)
);
} else {
// These two IDs are actually the same! But nothing should rely on that.
var thisID = this._rootNodeID;
var prevComponentID = prevComponentInstance._rootNodeID;
ReactReconciler.unmountComponent(prevComponentInstance);
this._renderedComponent = this._instantiateReactComponent(
nextRenderedElement,
this._currentElement.type
);
var nextMarkup = ReactReconciler.mountComponent(
this._renderedComponent,
thisID,
transaction,
this._mergeChildContext(context, childContext)
);
this._replaceNodeWithMarkupByID(prevComponentID, nextMarkup);
}
},
/**
* @protected
*/
_replaceNodeWithMarkupByID: function(prevComponentID, nextMarkup) {
ReactComponentEnvironment.replaceNodeWithMarkupByID(
prevComponentID,
nextMarkup
);
},
/**
* @protected
*/
_renderValidatedComponentWithoutOwnerOrContext: function() {
var inst = this._instance;
var renderedComponent = inst.render();
if ("production" !== "production") {
// We allow auto-mocks to proceed as if they're returning null.
if (typeof renderedComponent === 'undefined' &&
inst.render._isMockFunction) {
// This is probably bad practice. Consider warning here and
// deprecating this convenience.
renderedComponent = null;
}
}
return renderedComponent;
},
/**
* @private
*/
_renderValidatedComponent: function(childContext) {
var renderedComponent;
var previousContext = ReactContext.current;
ReactContext.current = this._mergeChildContext(
this._currentElement._context,
childContext
);
ReactCurrentOwner.current = this;
try {
renderedComponent =
this._renderValidatedComponentWithoutOwnerOrContext();
} finally {
ReactContext.current = previousContext;
ReactCurrentOwner.current = null;
}
("production" !== "production" ? invariant(
// TODO: An `isValidNode` function would probably be more appropriate
renderedComponent === null || renderedComponent === false ||
ReactElement.isValidElement(renderedComponent),
'%s.render(): A valid ReactComponent must be returned. You may have ' +
'returned undefined, an array or some other invalid object.',
this.getName() || 'ReactCompositeComponent'
) : invariant(// TODO: An `isValidNode` function would probably be more appropriate
renderedComponent === null || renderedComponent === false ||
ReactElement.isValidElement(renderedComponent)));
return renderedComponent;
},
/**
* Lazily allocates the refs object and stores `component` as `ref`.
*
* @param {string} ref Reference name.
* @param {component} component Component to store as `ref`.
* @final
* @private
*/
attachRef: function(ref, component) {
var inst = this.getPublicInstance();
var refs = inst.refs === emptyObject ? (inst.refs = {}) : inst.refs;
refs[ref] = component.getPublicInstance();
},
/**
* Detaches a reference name.
*
* @param {string} ref Name to dereference.
* @final
* @private
*/
detachRef: function(ref) {
var refs = this.getPublicInstance().refs;
delete refs[ref];
},
/**
* Get a text description of the component that can be used to identify it
* in error messages.
* @return {string} The name or null.
* @internal
*/
getName: function() {
var type = this._currentElement.type;
var constructor = this._instance && this._instance.constructor;
return (
type.displayName || (constructor && constructor.displayName) ||
type.name || (constructor && constructor.name) ||
null
);
},
/**
* Get the publicly accessible representation of this component - i.e. what
* is exposed by refs and returned by React.render. Can be null for stateless
* components.
*
* @return {ReactComponent} the public component instance.
* @internal
*/
getPublicInstance: function() {
return this._instance;
},
// Stub
_instantiateReactComponent: null
};
ReactPerf.measureMethods(
ReactCompositeComponentMixin,
'ReactCompositeComponent',
{
mountComponent: 'mountComponent',
updateComponent: 'updateComponent',
_renderValidatedComponent: '_renderValidatedComponent'
}
);
var ReactCompositeComponent = {
Mixin: ReactCompositeComponentMixin
};
module.exports = ReactCompositeComponent;
},{"./Object.assign":33,"./ReactComponentEnvironment":45,"./ReactContext":48,"./ReactCurrentOwner":49,"./ReactElement":67,"./ReactElementValidator":68,"./ReactInstanceMap":77,"./ReactLifeCycle":78,"./ReactNativeComponent":84,"./ReactPerf":86,"./ReactPropTypeLocationNames":88,"./ReactPropTypeLocations":89,"./ReactReconciler":93,"./ReactUpdates":104,"./emptyObject":135,"./invariant":155,"./shouldUpdateReactComponent":172,"./warning":176}],48:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactContext
*/
'use strict';
var assign = require("./Object.assign");
var emptyObject = require("./emptyObject");
var warning = require("./warning");
var didWarn = false;
/**
* Keeps track of the current context.
*
* The context is automatically passed down the component ownership hierarchy
* and is accessible via `this.context` on ReactCompositeComponents.
*/
var ReactContext = {
/**
* @internal
* @type {object}
*/
current: emptyObject,
/**
* Temporarily extends the current context while executing scopedCallback.
*
* A typical use case might look like
*
* render: function() {
* var children = ReactContext.withContext({foo: 'foo'}, () => (
 *        <Component />
* ));
* return <div>{children}</div>;
* }
*
* @param {object} newContext New context to merge into the existing context
* @param {function} scopedCallback Callback to run with the new context
* @return {ReactComponent|array<ReactComponent>}
*/
withContext: function(newContext, scopedCallback) {
if ("production" !== "production") {
("production" !== "production" ? warning(
didWarn,
'withContext is deprecated and will be removed in a future version. ' +
'Use a wrapper component with getChildContext instead.'
) : null);
didWarn = true;
}
var result;
var previousContext = ReactContext.current;
ReactContext.current = assign({}, previousContext, newContext);
try {
result = scopedCallback();
} finally {
ReactContext.current = previousContext;
}
return result;
}
};
module.exports = ReactContext;
},{"./Object.assign":33,"./emptyObject":135,"./warning":176}],49:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactCurrentOwner
*/
'use strict';
/**
* Keeps track of the current owner.
*
* The current owner is the component who should own any components that are
* currently being constructed.
*
 * The depth indicates how many composite components are above this render level.
*/
var ReactCurrentOwner = {
/**
* @internal
* @type {ReactComponent}
*/
current: null
};
module.exports = ReactCurrentOwner;
},{}],50:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactDOM
* @typechecks static-only
*/
'use strict';
var ReactElement = require("./ReactElement");
var ReactElementValidator = require("./ReactElementValidator");
var mapObject = require("./mapObject");
/**
* Create a factory that creates HTML tag elements.
*
* @param {string} tag Tag name (e.g. `div`).
* @private
*/
function createDOMFactory(tag) {
if ("production" !== "production") {
return ReactElementValidator.createFactory(tag);
}
return ReactElement.createFactory(tag);
}
/**
* Creates a mapping from supported HTML tags to `ReactDOMComponent` classes.
* This is also accessible via `React.DOM`.
*
* @public
*/
var ReactDOM = mapObject({
a: 'a',
abbr: 'abbr',
address: 'address',
area: 'area',
article: 'article',
aside: 'aside',
audio: 'audio',
b: 'b',
base: 'base',
bdi: 'bdi',
bdo: 'bdo',
big: 'big',
blockquote: 'blockquote',
body: 'body',
br: 'br',
button: 'button',
canvas: 'canvas',
caption: 'caption',
cite: 'cite',
code: 'code',
col: 'col',
colgroup: 'colgroup',
data: 'data',
datalist: 'datalist',
dd: 'dd',
del: 'del',
details: 'details',
dfn: 'dfn',
dialog: 'dialog',
div: 'div',
dl: 'dl',
dt: 'dt',
em: 'em',
embed: 'embed',
fieldset: 'fieldset',
figcaption: 'figcaption',
figure: 'figure',
footer: 'footer',
form: 'form',
h1: 'h1',
h2: 'h2',
h3: 'h3',
h4: 'h4',
h5: 'h5',
h6: 'h6',
head: 'head',
header: 'header',
hr: 'hr',
html: 'html',
i: 'i',
iframe: 'iframe',
img: 'img',
input: 'input',
ins: 'ins',
kbd: 'kbd',
keygen: 'keygen',
label: 'label',
legend: 'legend',
li: 'li',
link: 'link',
main: 'main',
map: 'map',
mark: 'mark',
menu: 'menu',
menuitem: 'menuitem',
meta: 'meta',
meter: 'meter',
nav: 'nav',
noscript: 'noscript',
object: 'object',
ol: 'ol',
optgroup: 'optgroup',
option: 'option',
output: 'output',
p: 'p',
param: 'param',
picture: 'picture',
pre: 'pre',
progress: 'progress',
q: 'q',
rp: 'rp',
rt: 'rt',
ruby: 'ruby',
s: 's',
samp: 'samp',
script: 'script',
section: 'section',
select: 'select',
small: 'small',
source: 'source',
span: 'span',
strong: 'strong',
style: 'style',
sub: 'sub',
summary: 'summary',
sup: 'sup',
table: 'table',
tbody: 'tbody',
td: 'td',
textarea: 'textarea',
tfoot: 'tfoot',
th: 'th',
thead: 'thead',
time: 'time',
title: 'title',
tr: 'tr',
track: 'track',
u: 'u',
ul: 'ul',
'var': 'var',
video: 'video',
wbr: 'wbr',
// SVG
circle: 'circle',
clipPath: 'clipPath',
defs: 'defs',
ellipse: 'ellipse',
g: 'g',
line: 'line',
linearGradient: 'linearGradient',
mask: 'mask',
path: 'path',
pattern: 'pattern',
polygon: 'polygon',
polyline: 'polyline',
radialGradient: 'radialGradient',
rect: 'rect',
stop: 'stop',
svg: 'svg',
text: 'text',
tspan: 'tspan'
}, createDOMFactory);
module.exports = ReactDOM;
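// Illustrative sketch (not part of the original source): each entry above
// becomes a pre-bound element factory, so these two calls produce equivalent
// elements:
//
//   var el1 = React.DOM.div({className: 'row'}, 'hello');
//   var el2 = React.createElement('div', {className: 'row'}, 'hello');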
},{"./ReactElement":67,"./ReactElementValidator":68,"./mapObject":163}],51:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactDOMButton
*/
'use strict';
var AutoFocusMixin = require("./AutoFocusMixin");
var ReactBrowserComponentMixin = require("./ReactBrowserComponentMixin");
var ReactClass = require("./ReactClass");
var ReactElement = require("./ReactElement");
var keyMirror = require("./keyMirror");
var button = ReactElement.createFactory('button');
var mouseListenerNames = keyMirror({
onClick: true,
onDoubleClick: true,
onMouseDown: true,
onMouseMove: true,
onMouseUp: true,
onClickCapture: true,
onDoubleClickCapture: true,
onMouseDownCapture: true,
onMouseMoveCapture: true,
onMouseUpCapture: true
});
/**
* Implements a <button> native component that does not receive mouse events
* when `disabled` is set.
*/
var ReactDOMButton = ReactClass.createClass({
displayName: 'ReactDOMButton',
tagName: 'BUTTON',
mixins: [AutoFocusMixin, ReactBrowserComponentMixin],
render: function() {
var props = {};
// Copy the props; except the mouse listeners if we're disabled
for (var key in this.props) {
if (this.props.hasOwnProperty(key) &&
(!this.props.disabled || !mouseListenerNames[key])) {
props[key] = this.props[key];
}
}
return button(props, this.props.children);
}
});
module.exports = ReactDOMButton;
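// Illustrative sketch (not part of the original source): because render()
// above filters `mouseListenerNames` when `disabled` is set, a disabled
// button never fires its mouse handlers:
//
//   React.createElement('button', {disabled: true, onClick: handler}, 'Save');
//   // onClick, onMouseDown, etc. are dropped from the rendered <button disabled>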
},{"./AutoFocusMixin":6,"./ReactBrowserComponentMixin":36,"./ReactClass":42,"./ReactElement":67,"./keyMirror":161}],52:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactDOMComponent
* @typechecks static-only
*/
/* global hasOwnProperty:true */
'use strict';
var CSSPropertyOperations = require("./CSSPropertyOperations");
var DOMProperty = require("./DOMProperty");
var DOMPropertyOperations = require("./DOMPropertyOperations");
var ReactBrowserEventEmitter = require("./ReactBrowserEventEmitter");
var ReactComponentBrowserEnvironment =
require("./ReactComponentBrowserEnvironment");
var ReactMount = require("./ReactMount");
var ReactMultiChild = require("./ReactMultiChild");
var ReactPerf = require("./ReactPerf");
var assign = require("./Object.assign");
var escapeTextContentForBrowser = require("./escapeTextContentForBrowser");
var invariant = require("./invariant");
var isEventSupported = require("./isEventSupported");
var keyOf = require("./keyOf");
var warning = require("./warning");
var deleteListener = ReactBrowserEventEmitter.deleteListener;
var listenTo = ReactBrowserEventEmitter.listenTo;
var registrationNameModules = ReactBrowserEventEmitter.registrationNameModules;
// For quickly matching children type, to test if it can be treated as content.
var CONTENT_TYPES = {'string': true, 'number': true};
var STYLE = keyOf({style: null});
var ELEMENT_NODE_TYPE = 1;
/**
* Optionally injectable operations for mutating the DOM
*/
var BackendIDOperations = null;
/**
* @param {?object} props
*/
function assertValidProps(props) {
if (!props) {
return;
}
// Note the use of `==` which checks for null or undefined.
if (props.dangerouslySetInnerHTML != null) {
("production" !== "production" ? invariant(
props.children == null,
'Can only set one of `children` or `props.dangerouslySetInnerHTML`.'
) : invariant(props.children == null));
("production" !== "production" ? invariant(
typeof props.dangerouslySetInnerHTML === 'object' &&
'__html' in props.dangerouslySetInnerHTML,
'`props.dangerouslySetInnerHTML` must be in the form `{__html: ...}`. ' +
'Please visit https://fb.me/react-invariant-dangerously-set-inner-html ' +
'for more information.'
) : invariant(typeof props.dangerouslySetInnerHTML === 'object' &&
'__html' in props.dangerouslySetInnerHTML));
}
if ("production" !== "production") {
("production" !== "production" ? warning(
props.innerHTML == null,
'Directly setting property `innerHTML` is not permitted. ' +
'For more information, lookup documentation on `dangerouslySetInnerHTML`.'
) : null);
("production" !== "production" ? warning(
!props.contentEditable || props.children == null,
'A component is `contentEditable` and contains `children` managed by ' +
'React. It is now your responsibility to guarantee that none of ' +
'those nodes are unexpectedly modified or duplicated. This is ' +
'probably not intentional.'
) : null);
}
("production" !== "production" ? invariant(
props.style == null || typeof props.style === 'object',
'The `style` prop expects a mapping from style properties to values, ' +
'not a string. For example, style={{marginRight: spacing + \'em\'}} when ' +
'using JSX.'
) : invariant(props.style == null || typeof props.style === 'object'));
}
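// Illustrative sketch (not part of the original source): prop shapes that pass
// and fail the assertions above:
//
//   assertValidProps({dangerouslySetInnerHTML: {__html: '<b>hi</b>'}}); // ok
//   assertValidProps({style: {marginRight: '1em'}});                   // ok
//   assertValidProps({dangerouslySetInnerHTML: '<b>hi</b>'}); // throws: needs {__html: ...}
//   assertValidProps({style: 'margin-right: 1em'});           // throws: style must be an object
//   assertValidProps({children: 'x',
//                     dangerouslySetInnerHTML: {__html: 'y'}}); // throws: only one allowed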
function putListener(id, registrationName, listener, transaction) {
if ("production" !== "production") {
// IE8 has no API for event capturing and the `onScroll` event doesn't
// bubble.
("production" !== "production" ? warning(
registrationName !== 'onScroll' || isEventSupported('scroll', true),
'This browser doesn\'t support the `onScroll` event'
) : null);
}
var container = ReactMount.findReactContainerForID(id);
if (container) {
var doc = container.nodeType === ELEMENT_NODE_TYPE ?
container.ownerDocument :
container;
listenTo(registrationName, doc);
}
transaction.getPutListenerQueue().enqueuePutListener(
id,
registrationName,
listener
);
}
// For HTML, certain tags should omit their close tag. We keep a whitelist for
// those special cased tags.
var omittedCloseTags = {
'area': true,
'base': true,
'br': true,
'col': true,
'embed': true,
'hr': true,
'img': true,
'input': true,
'keygen': true,
'link': true,
'meta': true,
'param': true,
'source': true,
'track': true,
'wbr': true
// NOTE: menuitem's close tag should be omitted, but that causes problems.
};
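// Illustrative sketch (not part of the original source): mountComponent
// consults this whitelist when assembling markup, so void elements get no
// closing tag (IDs shown are approximate):
//
//   // 'br'  -> '<br data-reactid=".0">'         (closeTag is '')
//   // 'div' -> '<div data-reactid=".0"></div>'  (closeTag is '</div>')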
// We accept any tag to be rendered but since this gets injected into arbitrary
// HTML, we want to make sure that it's a safe tag.
// http://www.w3.org/TR/REC-xml/#NT-Name
var VALID_TAG_REGEX = /^[a-zA-Z][a-zA-Z:_\.\-\d]*$/; // Simplified subset
var validatedTagCache = {};
var hasOwnProperty = {}.hasOwnProperty;
function validateDangerousTag(tag) {
if (!hasOwnProperty.call(validatedTagCache, tag)) {
("production" !== "production" ? invariant(VALID_TAG_REGEX.test(tag), 'Invalid tag: %s', tag) : invariant(VALID_TAG_REGEX.test(tag)));
validatedTagCache[tag] = true;
}
}
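// Illustrative sketch (not part of the original source): tags accepted and
// rejected by VALID_TAG_REGEX above:
//
//   validateDangerousTag('div');      // ok
//   validateDangerousTag('svg:path'); // ok - ':', '_', '.', '-', digits allowed after the first char
//   validateDangerousTag('1div');     // throws - must start with a letter
//   validateDangerousTag('div><img'); // throws - markup injection attempt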
/**
* Creates a new React class that is idempotent and capable of containing other
* React components. It accepts event listeners and DOM properties that are
* valid according to `DOMProperty`.
*
* - Event listeners: `onClick`, `onMouseDown`, etc.
* - DOM properties: `className`, `name`, `title`, etc.
*
* The `style` property functions differently from the DOM API. It accepts an
* object mapping of style properties to values.
*
* @constructor ReactDOMComponent
* @extends ReactMultiChild
*/
function ReactDOMComponent(tag) {
validateDangerousTag(tag);
this._tag = tag;
this._renderedChildren = null;
this._previousStyleCopy = null;
this._rootNodeID = null;
}
ReactDOMComponent.displayName = 'ReactDOMComponent';
ReactDOMComponent.Mixin = {
construct: function(element) {
this._currentElement = element;
},
/**
* Generates root tag markup then recurses. This method has side effects and
* is not idempotent.
*
* @internal
* @param {string} rootID The root DOM ID for this node.
* @param {ReactReconcileTransaction|ReactServerRenderingTransaction} transaction
* @return {string} The computed markup.
*/
mountComponent: function(rootID, transaction, context) {
this._rootNodeID = rootID;
assertValidProps(this._currentElement.props);
var closeTag = omittedCloseTags[this._tag] ? '' : '</' + this._tag + '>';
return (
this._createOpenTagMarkupAndPutListeners(transaction) +
this._createContentMarkup(transaction, context) +
closeTag
);
},
/**
* Creates markup for the open tag and all attributes.
*
* This method has side effects because events get registered.
*
* Iterating over object properties is faster than iterating over arrays.
* @see http://jsperf.com/obj-vs-arr-iteration
*
* @private
* @param {ReactReconcileTransaction|ReactServerRenderingTransaction} transaction
* @return {string} Markup of opening tag.
*/
_createOpenTagMarkupAndPutListeners: function(transaction) {
var props = this._currentElement.props;
var ret = '<' + this._tag;
for (var propKey in props) {
if (!props.hasOwnProperty(propKey)) {
continue;
}
var propValue = props[propKey];
if (propValue == null) {
continue;
}
if (registrationNameModules.hasOwnProperty(propKey)) {
putListener(this._rootNodeID, propKey, propValue, transaction);
} else {
if (propKey === STYLE) {
if (propValue) {
propValue = this._previousStyleCopy = assign({}, props.style);
}
propValue = CSSPropertyOperations.createMarkupForStyles(propValue);
}
var markup =
DOMPropertyOperations.createMarkupForProperty(propKey, propValue);
if (markup) {
ret += ' ' + markup;
}
}
}
// For static pages, no need to put React ID and checksum. Saves lots of
// bytes.
if (transaction.renderToStaticMarkup) {
return ret + '>';
}
var markupForID = DOMPropertyOperations.createMarkupForID(this._rootNodeID);
return ret + ' ' + markupForID + '>';
},
/**
* Creates markup for the content between the tags.
*
* @private
* @param {ReactReconcileTransaction|ReactServerRenderingTransaction} transaction
* @param {object} context
* @return {string} Content markup.
*/
_createContentMarkup: function(transaction, context) {
var prefix = '';
if (this._tag === 'listing' ||
this._tag === 'pre' ||
this._tag === 'textarea') {
// Add an initial newline because browsers ignore the first newline in
// a <listing>, <pre>, or <textarea> as an "authoring convenience" -- see
// https://html.spec.whatwg.org/multipage/syntax.html#parsing-main-inbody.
prefix = '\n';
}
var props = this._currentElement.props;
// Intentional use of != to avoid catching zero/false.
var innerHTML = props.dangerouslySetInnerHTML;
if (innerHTML != null) {
if (innerHTML.__html != null) {
return prefix + innerHTML.__html;
}
} else {
var contentToUse =
CONTENT_TYPES[typeof props.children] ? props.children : null;
var childrenToUse = contentToUse != null ? null : props.children;
if (contentToUse != null) {
return prefix + escapeTextContentForBrowser(contentToUse);
} else if (childrenToUse != null) {
var mountImages = this.mountChildren(
childrenToUse,
transaction,
context
);
return prefix + mountImages.join('');
}
}
return prefix;
},
receiveComponent: function(nextElement, transaction, context) {
var prevElement = this._currentElement;
this._currentElement = nextElement;
this.updateComponent(transaction, prevElement, nextElement, context);
},
/**
* Updates a native DOM component after it has already been allocated and
* attached to the DOM. Reconciles the root DOM node, then recurses.
*
* @param {ReactReconcileTransaction} transaction
* @param {ReactElement} prevElement
* @param {ReactElement} nextElement
* @internal
* @overridable
*/
updateComponent: function(transaction, prevElement, nextElement, context) {
assertValidProps(this._currentElement.props);
this._updateDOMProperties(prevElement.props, transaction);
this._updateDOMChildren(prevElement.props, transaction, context);
},
/**
* Reconciles the properties by detecting differences in property values and
* updating the DOM as necessary. This function is probably the single most
* critical path for performance optimization.
*
* TODO: Benchmark whether checking for changed values in memory actually
* improves performance (especially statically positioned elements).
* TODO: Benchmark the effects of putting this at the top since 99% of props
* do not change for a given reconciliation.
* TODO: Benchmark areas that can be improved with caching.
*
* @private
* @param {object} lastProps
* @param {ReactReconcileTransaction} transaction
*/
_updateDOMProperties: function(lastProps, transaction) {
var nextProps = this._currentElement.props;
var propKey;
var styleName;
var styleUpdates;
for (propKey in lastProps) {
if (nextProps.hasOwnProperty(propKey) ||
!lastProps.hasOwnProperty(propKey)) {
continue;
}
if (propKey === STYLE) {
var lastStyle = this._previousStyleCopy;
for (styleName in lastStyle) {
if (lastStyle.hasOwnProperty(styleName)) {
styleUpdates = styleUpdates || {};
styleUpdates[styleName] = '';
}
}
this._previousStyleCopy = null;
} else if (registrationNameModules.hasOwnProperty(propKey)) {
deleteListener(this._rootNodeID, propKey);
} else if (
DOMProperty.isStandardName[propKey] ||
DOMProperty.isCustomAttribute(propKey)) {
BackendIDOperations.deletePropertyByID(
this._rootNodeID,
propKey
);
}
}
for (propKey in nextProps) {
var nextProp = nextProps[propKey];
var lastProp = propKey === STYLE ?
this._previousStyleCopy :
lastProps[propKey];
if (!nextProps.hasOwnProperty(propKey) || nextProp === lastProp) {
continue;
}
if (propKey === STYLE) {
if (nextProp) {
nextProp = this._previousStyleCopy = assign({}, nextProp);
} else {
this._previousStyleCopy = null;
}
if (lastProp) {
// Unset styles on `lastProp` but not on `nextProp`.
for (styleName in lastProp) {
if (lastProp.hasOwnProperty(styleName) &&
(!nextProp || !nextProp.hasOwnProperty(styleName))) {
styleUpdates = styleUpdates || {};
styleUpdates[styleName] = '';
}
}
// Update styles that changed since `lastProp`.
for (styleName in nextProp) {
if (nextProp.hasOwnProperty(styleName) &&
lastProp[styleName] !== nextProp[styleName]) {
styleUpdates = styleUpdates || {};
styleUpdates[styleName] = nextProp[styleName];
}
}
} else {
// Relies on `updateStylesByID` not mutating `styleUpdates`.
styleUpdates = nextProp;
}
} else if (registrationNameModules.hasOwnProperty(propKey)) {
putListener(this._rootNodeID, propKey, nextProp, transaction);
} else if (
DOMProperty.isStandardName[propKey] ||
DOMProperty.isCustomAttribute(propKey)) {
BackendIDOperations.updatePropertyByID(
this._rootNodeID,
propKey,
nextProp
);
}
}
if (styleUpdates) {
BackendIDOperations.updateStylesByID(
this._rootNodeID,
styleUpdates
);
}
},
/**
* Reconciles the children with the various properties that affect the
* children content.
*
* @param {object} lastProps
* @param {ReactReconcileTransaction} transaction
*/
_updateDOMChildren: function(lastProps, transaction, context) {
var nextProps = this._currentElement.props;
var lastContent =
CONTENT_TYPES[typeof lastProps.children] ? lastProps.children : null;
var nextContent =
CONTENT_TYPES[typeof nextProps.children] ? nextProps.children : null;
var lastHtml =
lastProps.dangerouslySetInnerHTML &&
lastProps.dangerouslySetInnerHTML.__html;
var nextHtml =
nextProps.dangerouslySetInnerHTML &&
nextProps.dangerouslySetInnerHTML.__html;
// Note the use of `!=` which checks for null or undefined.
var lastChildren = lastContent != null ? null : lastProps.children;
var nextChildren = nextContent != null ? null : nextProps.children;
// If we're switching from children to content/html or vice versa, remove
// the old content
var lastHasContentOrHtml = lastContent != null || lastHtml != null;
var nextHasContentOrHtml = nextContent != null || nextHtml != null;
if (lastChildren != null && nextChildren == null) {
this.updateChildren(null, transaction, context);
} else if (lastHasContentOrHtml && !nextHasContentOrHtml) {
this.updateTextContent('');
}
if (nextContent != null) {
if (lastContent !== nextContent) {
this.updateTextContent('' + nextContent);
}
} else if (nextHtml != null) {
if (lastHtml !== nextHtml) {
BackendIDOperations.updateInnerHTMLByID(
this._rootNodeID,
nextHtml
);
}
} else if (nextChildren != null) {
this.updateChildren(nextChildren, transaction, context);
}
},
/**
* Destroys all event registrations for this instance. Does not remove from
* the DOM. That must be done by the parent.
*
* @internal
*/
unmountComponent: function() {
this.unmountChildren();
ReactBrowserEventEmitter.deleteAllListeners(this._rootNodeID);
ReactComponentBrowserEnvironment.unmountIDFromEnvironment(this._rootNodeID);
this._rootNodeID = null;
}
};
ReactPerf.measureMethods(ReactDOMComponent, 'ReactDOMComponent', {
mountComponent: 'mountComponent',
updateComponent: 'updateComponent'
});
assign(
ReactDOMComponent.prototype,
ReactDOMComponent.Mixin,
ReactMultiChild.Mixin
);
ReactDOMComponent.injection = {
injectIDOperations: function(IDOperations) {
ReactDOMComponent.BackendIDOperations = BackendIDOperations = IDOperations;
}
};
module.exports = ReactDOMComponent;
},{"./CSSPropertyOperations":10,"./DOMProperty":15,"./DOMPropertyOperations":16,"./Object.assign":33,"./ReactBrowserEventEmitter":37,"./ReactComponentBrowserEnvironment":44,"./ReactMount":81,"./ReactMultiChild":82,"./ReactPerf":86,"./escapeTextContentForBrowser":136,"./invariant":155,"./isEventSupported":156,"./keyOf":162,"./warning":176}],53:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactDOMForm
*/
'use strict';
var EventConstants = require("./EventConstants");
var LocalEventTrapMixin = require("./LocalEventTrapMixin");
var ReactBrowserComponentMixin = require("./ReactBrowserComponentMixin");
var ReactClass = require("./ReactClass");
var ReactElement = require("./ReactElement");
var form = ReactElement.createFactory('form');
/**
* Since onSubmit doesn't bubble OR capture on the top level in IE8, we need
* to capture it on the <form> element itself. There are lots of hacks we could
* do to accomplish this, but the most reliable is to make <form> a
* composite component and use `componentDidMount` to attach the event handlers.
*/
var ReactDOMForm = ReactClass.createClass({
displayName: 'ReactDOMForm',
tagName: 'FORM',
mixins: [ReactBrowserComponentMixin, LocalEventTrapMixin],
render: function() {
// TODO: Instead of using `ReactDOM` directly, we should use JSX. However,
// `jshint` fails to parse JSX so in order for linting to work in the open
// source repo, we need to just use `ReactDOM.form`.
return form(this.props);
},
componentDidMount: function() {
this.trapBubbledEvent(EventConstants.topLevelTypes.topReset, 'reset');
this.trapBubbledEvent(EventConstants.topLevelTypes.topSubmit, 'submit');
}
});
module.exports = ReactDOMForm;
},{"./EventConstants":20,"./LocalEventTrapMixin":31,"./ReactBrowserComponentMixin":36,"./ReactClass":42,"./ReactElement":67}],54:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactDOMIDOperations
* @typechecks static-only
*/
/*jslint evil: true */
'use strict';
var CSSPropertyOperations = require("./CSSPropertyOperations");
var DOMChildrenOperations = require("./DOMChildrenOperations");
var DOMPropertyOperations = require("./DOMPropertyOperations");
var ReactMount = require("./ReactMount");
var ReactPerf = require("./ReactPerf");
var invariant = require("./invariant");
var setInnerHTML = require("./setInnerHTML");
/**
 * Errors for properties that should not be updated with `updatePropertyByID()`.
*
* @type {object}
* @private
*/
var INVALID_PROPERTY_ERRORS = {
dangerouslySetInnerHTML:
'`dangerouslySetInnerHTML` must be set using `updateInnerHTMLByID()`.',
style: '`style` must be set using `updateStylesByID()`.'
};
/**
* Operations used to process updates to DOM nodes. This is made injectable via
* `ReactDOMComponent.BackendIDOperations`.
*/
var ReactDOMIDOperations = {
/**
* Updates a DOM node with new property values. This should only be used to
* update DOM properties in `DOMProperty`.
*
* @param {string} id ID of the node to update.
* @param {string} name A valid property name, see `DOMProperty`.
* @param {*} value New value of the property.
* @internal
*/
updatePropertyByID: function(id, name, value) {
var node = ReactMount.getNode(id);
("production" !== "production" ? invariant(
!INVALID_PROPERTY_ERRORS.hasOwnProperty(name),
'updatePropertyByID(...): %s',
INVALID_PROPERTY_ERRORS[name]
) : invariant(!INVALID_PROPERTY_ERRORS.hasOwnProperty(name)));
// If we're updating to null or undefined, we should remove the property
    // from the DOM node instead of inadvertently setting to a string. This
// brings us in line with the same behavior we have on initial render.
if (value != null) {
DOMPropertyOperations.setValueForProperty(node, name, value);
} else {
DOMPropertyOperations.deleteValueForProperty(node, name);
}
},
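  // Illustrative sketch (not part of the original source): updating a property
  // to null or undefined removes it rather than stringifying the value:
  //
  //   ReactDOMIDOperations.updatePropertyByID(id, 'title', 'hi'); // sets title="hi"
  //   ReactDOMIDOperations.updatePropertyByID(id, 'title', null); // removes the attribute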
/**
* Updates a DOM node to remove a property. This should only be used to remove
* DOM properties in `DOMProperty`.
*
* @param {string} id ID of the node to update.
* @param {string} name A property name to remove, see `DOMProperty`.
* @internal
*/
deletePropertyByID: function(id, name, value) {
var node = ReactMount.getNode(id);
("production" !== "production" ? invariant(
!INVALID_PROPERTY_ERRORS.hasOwnProperty(name),
      'deletePropertyByID(...): %s',
INVALID_PROPERTY_ERRORS[name]
) : invariant(!INVALID_PROPERTY_ERRORS.hasOwnProperty(name)));
DOMPropertyOperations.deleteValueForProperty(node, name, value);
},
/**
* Updates a DOM node with new style values. If a value is specified as '',
* the corresponding style property will be unset.
*
* @param {string} id ID of the node to update.
* @param {object} styles Mapping from styles to values.
* @internal
*/
updateStylesByID: function(id, styles) {
var node = ReactMount.getNode(id);
CSSPropertyOperations.setValueForStyles(node, styles);
},
/**
* Updates a DOM node's innerHTML.
*
* @param {string} id ID of the node to update.
* @param {string} html An HTML string.
* @internal
*/
updateInnerHTMLByID: function(id, html) {
var node = ReactMount.getNode(id);
setInnerHTML(node, html);
},
/**
* Updates a DOM node's text content set by `props.content`.
*
* @param {string} id ID of the node to update.
* @param {string} content Text content.
* @internal
*/
updateTextContentByID: function(id, content) {
var node = ReactMount.getNode(id);
DOMChildrenOperations.updateTextContent(node, content);
},
/**
* Replaces a DOM node that exists in the document with markup.
*
* @param {string} id ID of child to be replaced.
* @param {string} markup Dangerous markup to inject in place of child.
* @internal
* @see {Danger.dangerouslyReplaceNodeWithMarkup}
*/
dangerouslyReplaceNodeWithMarkupByID: function(id, markup) {
var node = ReactMount.getNode(id);
DOMChildrenOperations.dangerouslyReplaceNodeWithMarkup(node, markup);
},
/**
* Updates a component's children by processing a series of updates.
*
* @param {array<object>} updates List of update configurations.
* @param {array<string>} markup List of markup strings.
* @internal
*/
dangerouslyProcessChildrenUpdates: function(updates, markup) {
for (var i = 0; i < updates.length; i++) {
updates[i].parentNode = ReactMount.getNode(updates[i].parentID);
}
DOMChildrenOperations.processUpdates(updates, markup);
}
};
ReactPerf.measureMethods(ReactDOMIDOperations, 'ReactDOMIDOperations', {
updatePropertyByID: 'updatePropertyByID',
deletePropertyByID: 'deletePropertyByID',
updateStylesByID: 'updateStylesByID',
updateInnerHTMLByID: 'updateInnerHTMLByID',
updateTextContentByID: 'updateTextContentByID',
dangerouslyReplaceNodeWithMarkupByID: 'dangerouslyReplaceNodeWithMarkupByID',
dangerouslyProcessChildrenUpdates: 'dangerouslyProcessChildrenUpdates'
});
module.exports = ReactDOMIDOperations;
},{"./CSSPropertyOperations":10,"./DOMChildrenOperations":14,"./DOMPropertyOperations":16,"./ReactMount":81,"./ReactPerf":86,"./invariant":155,"./setInnerHTML":169}],55:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactDOMIframe
*/
'use strict';
var EventConstants = require("./EventConstants");
var LocalEventTrapMixin = require("./LocalEventTrapMixin");
var ReactBrowserComponentMixin = require("./ReactBrowserComponentMixin");
var ReactClass = require("./ReactClass");
var ReactElement = require("./ReactElement");
var iframe = ReactElement.createFactory('iframe');
/**
* Since onLoad doesn't bubble OR capture on the top level in IE8, we need to
* capture it on the <iframe> element itself. There are lots of hacks we could
* do to accomplish this, but the most reliable is to make <iframe> a composite
* component and use `componentDidMount` to attach the event handlers.
*/
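// Illustrative usage (not part of this bundle): thanks to this wrapper, an
// onLoad handler works even in IE8, e.g.
//   React.createElement('iframe', {src: '/page.html', onLoad: handleLoad})
// where `handleLoad` is a hypothetical callback supplied by the caller.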
var ReactDOMIframe = ReactClass.createClass({
displayName: 'ReactDOMIframe',
tagName: 'IFRAME',
mixins: [ReactBrowserComponentMixin, LocalEventTrapMixin],
render: function() {
return iframe(this.props);
},
componentDidMount: function() {
this.trapBubbledEvent(EventConstants.topLevelTypes.topLoad, 'load');
}
});
module.exports = ReactDOMIframe;
},{"./EventConstants":20,"./LocalEventTrapMixin":31,"./ReactBrowserComponentMixin":36,"./ReactClass":42,"./ReactElement":67}],56:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactDOMImg
*/
'use strict';
var EventConstants = require("./EventConstants");
var LocalEventTrapMixin = require("./LocalEventTrapMixin");
var ReactBrowserComponentMixin = require("./ReactBrowserComponentMixin");
var ReactClass = require("./ReactClass");
var ReactElement = require("./ReactElement");
var img = ReactElement.createFactory('img');
/**
* Since onLoad doesn't bubble OR capture on the top level in IE8, we need to
* capture it on the <img> element itself. There are lots of hacks we could do
* to accomplish this, but the most reliable is to make <img> a composite
* component and use `componentDidMount` to attach the event handlers.
*/
var ReactDOMImg = ReactClass.createClass({
displayName: 'ReactDOMImg',
tagName: 'IMG',
mixins: [ReactBrowserComponentMixin, LocalEventTrapMixin],
render: function() {
return img(this.props);
},
componentDidMount: function() {
this.trapBubbledEvent(EventConstants.topLevelTypes.topLoad, 'load');
this.trapBubbledEvent(EventConstants.topLevelTypes.topError, 'error');
}
});
module.exports = ReactDOMImg;
},{"./EventConstants":20,"./LocalEventTrapMixin":31,"./ReactBrowserComponentMixin":36,"./ReactClass":42,"./ReactElement":67}],57:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactDOMInput
*/
'use strict';
var AutoFocusMixin = require("./AutoFocusMixin");
var DOMPropertyOperations = require("./DOMPropertyOperations");
var LinkedValueUtils = require("./LinkedValueUtils");
var ReactBrowserComponentMixin = require("./ReactBrowserComponentMixin");
var ReactClass = require("./ReactClass");
var ReactElement = require("./ReactElement");
var ReactMount = require("./ReactMount");
var ReactUpdates = require("./ReactUpdates");
var assign = require("./Object.assign");
var invariant = require("./invariant");
var input = ReactElement.createFactory('input');
var instancesByReactID = {};
function forceUpdateIfMounted() {
/*jshint validthis:true */
if (this.isMounted()) {
this.forceUpdate();
}
}
/**
* Implements an <input> native component that allows setting these optional
* props: `checked`, `value`, `defaultChecked`, and `defaultValue`.
*
* If `checked` or `value` are not supplied (or null/undefined), user actions
* that affect the checked state or value will trigger updates to the element.
*
* If they are supplied (and not null/undefined), the rendered element will not
* trigger updates to the element. Instead, the props must change in order for
* the rendered element to be updated.
*
* The rendered element will be initialized as unchecked (or `defaultChecked`)
* with an empty value (or `defaultValue`).
*
* @see http://www.w3.org/TR/2012/WD-html5-20121025/the-input-element.html
*/
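// Illustrative sketch (assuming `React` is in scope; `this.state.text` and
// `this.handleChange` are hypothetical): a controlled input renders as
//   React.createElement('input', {value: this.state.text, onChange: this.handleChange})
// and only changes when `value` changes, while an uncontrolled input uses
//   React.createElement('input', {defaultValue: 'initial'})
// and lets user edits flow through without prop updates.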
var ReactDOMInput = ReactClass.createClass({
displayName: 'ReactDOMInput',
tagName: 'INPUT',
mixins: [AutoFocusMixin, LinkedValueUtils.Mixin, ReactBrowserComponentMixin],
getInitialState: function() {
var defaultValue = this.props.defaultValue;
return {
initialChecked: this.props.defaultChecked || false,
initialValue: defaultValue != null ? defaultValue : null
};
},
render: function() {
// Clone `this.props` so we don't mutate the input.
var props = assign({}, this.props);
props.defaultChecked = null;
props.defaultValue = null;
var value = LinkedValueUtils.getValue(this);
props.value = value != null ? value : this.state.initialValue;
var checked = LinkedValueUtils.getChecked(this);
props.checked = checked != null ? checked : this.state.initialChecked;
props.onChange = this._handleChange;
return input(props, this.props.children);
},
componentDidMount: function() {
var id = ReactMount.getID(this.getDOMNode());
instancesByReactID[id] = this;
},
componentWillUnmount: function() {
var rootNode = this.getDOMNode();
var id = ReactMount.getID(rootNode);
delete instancesByReactID[id];
},
componentDidUpdate: function(prevProps, prevState, prevContext) {
var rootNode = this.getDOMNode();
if (this.props.checked != null) {
DOMPropertyOperations.setValueForProperty(
rootNode,
'checked',
this.props.checked || false
);
}
var value = LinkedValueUtils.getValue(this);
if (value != null) {
// Cast `value` to a string to ensure the value is set correctly. While
// browsers typically do this as necessary, jsdom doesn't.
DOMPropertyOperations.setValueForProperty(rootNode, 'value', '' + value);
}
},
_handleChange: function(event) {
var returnValue;
var onChange = LinkedValueUtils.getOnChange(this);
if (onChange) {
returnValue = onChange.call(this, event);
}
// Here we use asap to wait until all updates have propagated, which
// is important when using controlled components within layers:
// https://github.com/facebook/react/issues/1698
ReactUpdates.asap(forceUpdateIfMounted, this);
var name = this.props.name;
if (this.props.type === 'radio' && name != null) {
var rootNode = this.getDOMNode();
var queryRoot = rootNode;
while (queryRoot.parentNode) {
queryRoot = queryRoot.parentNode;
}
// If `rootNode.form` was non-null, then we could try `form.elements`,
// but that sometimes behaves strangely in IE8. We could also try using
// `form.getElementsByName`, but that will only return direct children
// and won't include inputs that use the HTML5 `form=` attribute. Since
// the input might not even be in a form, let's just use the global
// `querySelectorAll` to ensure we don't miss anything.
var group = queryRoot.querySelectorAll(
'input[name=' + JSON.stringify('' + name) + '][type="radio"]');
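      // For example, name "choice" yields the selector
      //   input[name="choice"][type="radio"]
      // (JSON.stringify supplies the surrounding double quotes).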
for (var i = 0, groupLen = group.length; i < groupLen; i++) {
var otherNode = group[i];
if (otherNode === rootNode ||
otherNode.form !== rootNode.form) {
continue;
}
var otherID = ReactMount.getID(otherNode);
("production" !== "production" ? invariant(
otherID,
'ReactDOMInput: Mixing React and non-React radio inputs with the ' +
'same `name` is not supported.'
) : invariant(otherID));
var otherInstance = instancesByReactID[otherID];
("production" !== "production" ? invariant(
otherInstance,
'ReactDOMInput: Unknown radio button ID %s.',
otherID
) : invariant(otherInstance));
// If this is a controlled radio button group, forcing the input that
        // was previously checked to update will cause it to become re-checked
// as appropriate.
ReactUpdates.asap(forceUpdateIfMounted, otherInstance);
}
}
return returnValue;
}
});
module.exports = ReactDOMInput;
},{"./AutoFocusMixin":6,"./DOMPropertyOperations":16,"./LinkedValueUtils":30,"./Object.assign":33,"./ReactBrowserComponentMixin":36,"./ReactClass":42,"./ReactElement":67,"./ReactMount":81,"./ReactUpdates":104,"./invariant":155}],58:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactDOMOption
*/
'use strict';
var ReactBrowserComponentMixin = require("./ReactBrowserComponentMixin");
var ReactClass = require("./ReactClass");
var ReactElement = require("./ReactElement");
var warning = require("./warning");
var option = ReactElement.createFactory('option');
/**
* Implements an <option> native component that warns when `selected` is set.
*/
var ReactDOMOption = ReactClass.createClass({
displayName: 'ReactDOMOption',
tagName: 'OPTION',
mixins: [ReactBrowserComponentMixin],
componentWillMount: function() {
// TODO (yungsters): Remove support for `selected` in <option>.
if ("production" !== "production") {
("production" !== "production" ? warning(
this.props.selected == null,
'Use the `defaultValue` or `value` props on <select> instead of ' +
'setting `selected` on <option>.'
) : null);
}
},
render: function() {
return option(this.props, this.props.children);
}
});
module.exports = ReactDOMOption;
},{"./ReactBrowserComponentMixin":36,"./ReactClass":42,"./ReactElement":67,"./warning":176}],59:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactDOMSelect
*/
'use strict';
var AutoFocusMixin = require("./AutoFocusMixin");
var LinkedValueUtils = require("./LinkedValueUtils");
var ReactBrowserComponentMixin = require("./ReactBrowserComponentMixin");
var ReactClass = require("./ReactClass");
var ReactElement = require("./ReactElement");
var ReactUpdates = require("./ReactUpdates");
var assign = require("./Object.assign");
var select = ReactElement.createFactory('select');
function updateOptionsIfPendingUpdateAndMounted() {
/*jshint validthis:true */
if (this._pendingUpdate) {
this._pendingUpdate = false;
var value = LinkedValueUtils.getValue(this);
if (value != null && this.isMounted()) {
updateOptions(this, value);
}
}
}
/**
* Validation function for `value` and `defaultValue`.
* @private
*/
function selectValueType(props, propName, componentName) {
if (props[propName] == null) {
return null;
}
if (props.multiple) {
if (!Array.isArray(props[propName])) {
return new Error(
("The `" + propName + "` prop supplied to <select> must be an array if ") +
("`multiple` is true.")
);
}
} else {
if (Array.isArray(props[propName])) {
return new Error(
("The `" + propName + "` prop supplied to <select> must be a scalar ") +
("value if `multiple` is false.")
);
}
}
}
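// For example (hypothetical props): value="a" with multiple={true} produces
// the "must be an array" Error above, while value={['a']} without `multiple`
// produces the "must be a scalar" Error; valid combinations return undefined.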
/**
* @param {ReactComponent} component Instance of ReactDOMSelect
* @param {*} propValue A stringable (with `multiple`, a list of stringables).
* @private
*/
function updateOptions(component, propValue) {
var selectedValue, i, l;
var options = component.getDOMNode().options;
if (component.props.multiple) {
selectedValue = {};
for (i = 0, l = propValue.length; i < l; i++) {
selectedValue['' + propValue[i]] = true;
}
for (i = 0, l = options.length; i < l; i++) {
var selected = selectedValue.hasOwnProperty(options[i].value);
if (options[i].selected !== selected) {
options[i].selected = selected;
}
}
} else {
// Do not set `select.value` as exact behavior isn't consistent across all
// browsers for all cases.
selectedValue = '' + propValue;
for (i = 0, l = options.length; i < l; i++) {
if (options[i].value === selectedValue) {
options[i].selected = true;
return;
}
}
if (options.length) {
options[0].selected = true;
}
}
}
/**
* Implements a <select> native component that allows optionally setting the
* props `value` and `defaultValue`. If `multiple` is false, the prop must be a
* stringable. If `multiple` is true, the prop must be an array of stringables.
*
* If `value` is not supplied (or null/undefined), user actions that change the
* selected option will trigger updates to the rendered options.
*
* If it is supplied (and not null/undefined), the rendered options will not
* update in response to user actions. Instead, the `value` prop must change in
* order for the rendered options to update.
*
* If `defaultValue` is provided, any options with the supplied values will be
* selected.
*/
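// Illustrative sketch (assuming `React` is in scope and `fn` is a
// hypothetical change handler): a controlled multi-select is written as
//   React.createElement('select', {multiple: true, value: ['a', 'c'], onChange: fn},
//     React.createElement('option', {value: 'a'}, 'A'),
//     React.createElement('option', {value: 'b'}, 'B'),
//     React.createElement('option', {value: 'c'}, 'C'))
// where options 'a' and 'c' stay selected until the `value` prop changes.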
var ReactDOMSelect = ReactClass.createClass({
displayName: 'ReactDOMSelect',
tagName: 'SELECT',
mixins: [AutoFocusMixin, LinkedValueUtils.Mixin, ReactBrowserComponentMixin],
propTypes: {
defaultValue: selectValueType,
value: selectValueType
},
render: function() {
// Clone `this.props` so we don't mutate the input.
var props = assign({}, this.props);
props.onChange = this._handleChange;
props.value = null;
return select(props, this.props.children);
},
componentWillMount: function() {
this._pendingUpdate = false;
},
componentDidMount: function() {
var value = LinkedValueUtils.getValue(this);
if (value != null) {
updateOptions(this, value);
} else if (this.props.defaultValue != null) {
updateOptions(this, this.props.defaultValue);
}
},
componentDidUpdate: function(prevProps) {
var value = LinkedValueUtils.getValue(this);
if (value != null) {
this._pendingUpdate = false;
updateOptions(this, value);
} else if (!prevProps.multiple !== !this.props.multiple) {
// For simplicity, reapply `defaultValue` if `multiple` is toggled.
if (this.props.defaultValue != null) {
updateOptions(this, this.props.defaultValue);
} else {
// Revert the select back to its default unselected state.
updateOptions(this, this.props.multiple ? [] : '');
}
}
},
_handleChange: function(event) {
var returnValue;
var onChange = LinkedValueUtils.getOnChange(this);
if (onChange) {
returnValue = onChange.call(this, event);
}
this._pendingUpdate = true;
ReactUpdates.asap(updateOptionsIfPendingUpdateAndMounted, this);
return returnValue;
}
});
module.exports = ReactDOMSelect;
},{"./AutoFocusMixin":6,"./LinkedValueUtils":30,"./Object.assign":33,"./ReactBrowserComponentMixin":36,"./ReactClass":42,"./ReactElement":67,"./ReactUpdates":104}],60:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactDOMSelection
*/
'use strict';
var ExecutionEnvironment = require("./ExecutionEnvironment");
var getNodeForCharacterOffset = require("./getNodeForCharacterOffset");
var getTextContentAccessor = require("./getTextContentAccessor");
/**
* While `isCollapsed` is available on the Selection object and `collapsed`
* is available on the Range object, IE11 sometimes gets them wrong.
* If the anchor/focus nodes and offsets are the same, the range is collapsed.
*/
function isCollapsed(anchorNode, anchorOffset, focusNode, focusOffset) {
return anchorNode === focusNode && anchorOffset === focusOffset;
}
/**
* Get the appropriate anchor and focus node/offset pairs for IE.
*
* The catch here is that IE's selection API doesn't provide information
* about whether the selection is forward or backward, so we have to
* behave as though it's always forward.
*
 * IE text ranges differ from the modern Selection API in that they behave as
 * though block elements end with a newline. This means character offsets will
 * differ between the two APIs.
*
* @param {DOMElement} node
* @return {object}
*/
function getIEOffsets(node) {
var selection = document.selection;
var selectedRange = selection.createRange();
var selectedLength = selectedRange.text.length;
// Duplicate selection so we can move range without breaking user selection.
var fromStart = selectedRange.duplicate();
fromStart.moveToElementText(node);
fromStart.setEndPoint('EndToStart', selectedRange);
var startOffset = fromStart.text.length;
var endOffset = startOffset + selectedLength;
return {
start: startOffset,
end: endOffset
};
}
/**
* @param {DOMElement} node
* @return {?object}
*/
function getModernOffsets(node) {
var selection = window.getSelection && window.getSelection();
if (!selection || selection.rangeCount === 0) {
return null;
}
var anchorNode = selection.anchorNode;
var anchorOffset = selection.anchorOffset;
var focusNode = selection.focusNode;
var focusOffset = selection.focusOffset;
var currentRange = selection.getRangeAt(0);
// If the node and offset values are the same, the selection is collapsed.
// `Selection.isCollapsed` is available natively, but IE sometimes gets
// this value wrong.
var isSelectionCollapsed = isCollapsed(
selection.anchorNode,
selection.anchorOffset,
selection.focusNode,
selection.focusOffset
);
var rangeLength = isSelectionCollapsed ? 0 : currentRange.toString().length;
var tempRange = currentRange.cloneRange();
tempRange.selectNodeContents(node);
tempRange.setEnd(currentRange.startContainer, currentRange.startOffset);
var isTempRangeCollapsed = isCollapsed(
tempRange.startContainer,
tempRange.startOffset,
tempRange.endContainer,
tempRange.endOffset
);
var start = isTempRangeCollapsed ? 0 : tempRange.toString().length;
var end = start + rangeLength;
// Detect whether the selection is backward.
var detectionRange = document.createRange();
detectionRange.setStart(anchorNode, anchorOffset);
detectionRange.setEnd(focusNode, focusOffset);
var isBackward = detectionRange.collapsed;
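  // Per the DOM spec, setEnd() with a boundary before the range's start
  // collapses the range to that point, so `collapsed` is true when the focus
  // is at or before the anchor, i.e. the selection is backward (or empty,
  // in which case the swap below is harmless).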
return {
start: isBackward ? end : start,
end: isBackward ? start : end
};
}
/**
* @param {DOMElement|DOMTextNode} node
* @param {object} offsets
*/
function setIEOffsets(node, offsets) {
var range = document.selection.createRange().duplicate();
var start, end;
if (typeof offsets.end === 'undefined') {
start = offsets.start;
end = start;
} else if (offsets.start > offsets.end) {
start = offsets.end;
end = offsets.start;
} else {
start = offsets.start;
end = offsets.end;
}
range.moveToElementText(node);
range.moveStart('character', start);
range.setEndPoint('EndToStart', range);
range.moveEnd('character', end - start);
range.select();
}
/**
* In modern non-IE browsers, we can support both forward and backward
* selections.
*
* Note: IE10+ supports the Selection object, but it does not support
* the `extend` method, which means that even in modern IE, it's not possible
 * to programmatically create a backward selection. Thus, for all IE
* versions, we use the old IE API to create our selections.
*
* @param {DOMElement|DOMTextNode} node
* @param {object} offsets
*/
function setModernOffsets(node, offsets) {
if (!window.getSelection) {
return;
}
var selection = window.getSelection();
var length = node[getTextContentAccessor()].length;
var start = Math.min(offsets.start, length);
var end = typeof offsets.end === 'undefined' ?
start : Math.min(offsets.end, length);
// IE 11 uses modern selection, but doesn't support the extend method.
// Flip backward selections, so we can set with a single range.
if (!selection.extend && start > end) {
var temp = end;
end = start;
start = temp;
}
var startMarker = getNodeForCharacterOffset(node, start);
var endMarker = getNodeForCharacterOffset(node, end);
if (startMarker && endMarker) {
var range = document.createRange();
range.setStart(startMarker.node, startMarker.offset);
selection.removeAllRanges();
if (start > end) {
selection.addRange(range);
selection.extend(endMarker.node, endMarker.offset);
} else {
range.setEnd(endMarker.node, endMarker.offset);
selection.addRange(range);
}
}
}
var useIEOffsets = (
ExecutionEnvironment.canUseDOM &&
'selection' in document &&
!('getSelection' in window)
);
var ReactDOMSelection = {
/**
* @param {DOMElement} node
*/
getOffsets: useIEOffsets ? getIEOffsets : getModernOffsets,
/**
* @param {DOMElement|DOMTextNode} node
* @param {object} offsets
*/
setOffsets: useIEOffsets ? setIEOffsets : setModernOffsets
};
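// Typical use (by React's selection-restoration code elsewhere): read the
// offsets before an update and restore them afterwards, e.g.
//   var offsets = ReactDOMSelection.getOffsets(node);
//   // ... re-render ...
//   ReactDOMSelection.setOffsets(node, offsets);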
module.exports = ReactDOMSelection;
},{"./ExecutionEnvironment":26,"./getNodeForCharacterOffset":148,"./getTextContentAccessor":150}],61:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactDOMTextComponent
* @typechecks static-only
*/
'use strict';
var DOMPropertyOperations = require("./DOMPropertyOperations");
var ReactComponentBrowserEnvironment =
require("./ReactComponentBrowserEnvironment");
var ReactDOMComponent = require("./ReactDOMComponent");
var assign = require("./Object.assign");
var escapeTextContentForBrowser = require("./escapeTextContentForBrowser");
/**
 * Text nodes violate a couple of assumptions that React makes about components:
*
* - When mounting text into the DOM, adjacent text nodes are merged.
* - Text nodes cannot be assigned a React root ID.
*
* This component is used to wrap strings in elements so that they can undergo
* the same reconciliation that is applied to elements.
*
* TODO: Investigate representing React components in the DOM with text nodes.
*
* @class ReactDOMTextComponent
* @extends ReactComponent
* @internal
*/
var ReactDOMTextComponent = function(props) {
  // This constructor and its argument are currently used by mocks.
};
assign(ReactDOMTextComponent.prototype, {
/**
* @param {ReactText} text
* @internal
*/
construct: function(text) {
// TODO: This is really a ReactText (ReactNode), not a ReactElement
this._currentElement = text;
this._stringText = '' + text;
// Properties
this._rootNodeID = null;
this._mountIndex = 0;
},
/**
* Creates the markup for this text node. This node is not intended to have
* any features besides containing text content.
*
* @param {string} rootID DOM ID of the root node.
* @param {ReactReconcileTransaction|ReactServerRenderingTransaction} transaction
* @return {string} Markup for this text node.
* @internal
*/
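  // Illustrative output (assuming the default `data-reactid` ID attribute,
  // rootID ".0.1", and text "hi"):
  //   '<span data-reactid=".0.1">hi</span>'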
mountComponent: function(rootID, transaction, context) {
this._rootNodeID = rootID;
var escapedText = escapeTextContentForBrowser(this._stringText);
if (transaction.renderToStaticMarkup) {
// Normally we'd wrap this in a `span` for the reasons stated above, but
// since this is a situation where React won't take over (static pages),
// we can simply return the text as it is.
return escapedText;
}
return (
'<span ' + DOMPropertyOperations.createMarkupForID(rootID) + '>' +
escapedText +
'</span>'
);
},
/**
* Updates this component by updating the text content.
*
* @param {ReactText} nextText The next text content
* @param {ReactReconcileTransaction} transaction
* @internal
*/
receiveComponent: function(nextText, transaction) {
if (nextText !== this._currentElement) {
this._currentElement = nextText;
var nextStringText = '' + nextText;
if (nextStringText !== this._stringText) {
// TODO: Save this as pending props and use performUpdateIfNecessary
// and/or updateComponent to do the actual update for consistency with
// other component types?
this._stringText = nextStringText;
ReactDOMComponent.BackendIDOperations.updateTextContentByID(
this._rootNodeID,
nextStringText
);
}
}
},
unmountComponent: function() {
ReactComponentBrowserEnvironment.unmountIDFromEnvironment(this._rootNodeID);
}
});
module.exports = ReactDOMTextComponent;
},{"./DOMPropertyOperations":16,"./Object.assign":33,"./ReactComponentBrowserEnvironment":44,"./ReactDOMComponent":52,"./escapeTextContentForBrowser":136}],62:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactDOMTextarea
*/
'use strict';
var AutoFocusMixin = require("./AutoFocusMixin");
var DOMPropertyOperations = require("./DOMPropertyOperations");
var LinkedValueUtils = require("./LinkedValueUtils");
var ReactBrowserComponentMixin = require("./ReactBrowserComponentMixin");
var ReactClass = require("./ReactClass");
var ReactElement = require("./ReactElement");
var ReactUpdates = require("./ReactUpdates");
var assign = require("./Object.assign");
var invariant = require("./invariant");
var warning = require("./warning");
var textarea = ReactElement.createFactory('textarea');
function forceUpdateIfMounted() {
/*jshint validthis:true */
if (this.isMounted()) {
this.forceUpdate();
}
}
/**
 * Implements a <textarea> native component that allows setting `value` and
 * `defaultValue`. This differs from the traditional DOM API because value is
* usually set as PCDATA children.
*
* If `value` is not supplied (or null/undefined), user actions that affect the
* value will trigger updates to the element.
*
* If `value` is supplied (and not null/undefined), the rendered element will
* not trigger updates to the element. Instead, the `value` prop must change in
* order for the rendered element to be updated.
*
* The rendered element will be initialized with an empty value, the prop
* `defaultValue` if specified, or the children content (deprecated).
*/
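// Illustrative sketch (assuming `React` is in scope): prefer
//   React.createElement('textarea', {defaultValue: 'hello'})
// over the deprecated children form
//   React.createElement('textarea', null, 'hello')
// which triggers the warning below.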
var ReactDOMTextarea = ReactClass.createClass({
displayName: 'ReactDOMTextarea',
tagName: 'TEXTAREA',
mixins: [AutoFocusMixin, LinkedValueUtils.Mixin, ReactBrowserComponentMixin],
getInitialState: function() {
var defaultValue = this.props.defaultValue;
// TODO (yungsters): Remove support for children content in <textarea>.
var children = this.props.children;
if (children != null) {
if ("production" !== "production") {
("production" !== "production" ? warning(
false,
'Use the `defaultValue` or `value` props instead of setting ' +
'children on <textarea>.'
) : null);
}
("production" !== "production" ? invariant(
defaultValue == null,
'If you supply `defaultValue` on a <textarea>, do not pass children.'
) : invariant(defaultValue == null));
if (Array.isArray(children)) {
("production" !== "production" ? invariant(
children.length <= 1,
'<textarea> can only have at most one child.'
) : invariant(children.length <= 1));
children = children[0];
}
defaultValue = '' + children;
}
if (defaultValue == null) {
defaultValue = '';
}
var value = LinkedValueUtils.getValue(this);
return {
// We save the initial value so that `ReactDOMComponent` doesn't update
// `textContent` (unnecessary since we update value).
      // The initial value can be a boolean or object, which is why it is
      // coerced to a string here.
initialValue: '' + (value != null ? value : defaultValue)
};
},
render: function() {
// Clone `this.props` so we don't mutate the input.
var props = assign({}, this.props);
("production" !== "production" ? invariant(
props.dangerouslySetInnerHTML == null,
'`dangerouslySetInnerHTML` does not make sense on <textarea>.'
) : invariant(props.dangerouslySetInnerHTML == null));
props.defaultValue = null;
props.value = null;
props.onChange = this._handleChange;
// Always set children to the same thing. In IE9, the selection range will
// get reset if `textContent` is mutated.
return textarea(props, this.state.initialValue);
},
componentDidUpdate: function(prevProps, prevState, prevContext) {
var value = LinkedValueUtils.getValue(this);
if (value != null) {
var rootNode = this.getDOMNode();
// Cast `value` to a string to ensure the value is set correctly. While
// browsers typically do this as necessary, jsdom doesn't.
DOMPropertyOperations.setValueForProperty(rootNode, 'value', '' + value);
}
},
_handleChange: function(event) {
var returnValue;
var onChange = LinkedValueUtils.getOnChange(this);
if (onChange) {
returnValue = onChange.call(this, event);
}
ReactUpdates.asap(forceUpdateIfMounted, this);
return returnValue;
}
});
module.exports = ReactDOMTextarea;
},{"./AutoFocusMixin":6,"./DOMPropertyOperations":16,"./LinkedValueUtils":30,"./Object.assign":33,"./ReactBrowserComponentMixin":36,"./ReactClass":42,"./ReactElement":67,"./ReactUpdates":104,"./invariant":155,"./warning":176}],63:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactDefaultBatchingStrategy
*/
'use strict';
var ReactUpdates = require("./ReactUpdates");
var Transaction = require("./Transaction");
var assign = require("./Object.assign");
var emptyFunction = require("./emptyFunction");
var RESET_BATCHED_UPDATES = {
initialize: emptyFunction,
close: function() {
ReactDefaultBatchingStrategy.isBatchingUpdates = false;
}
};
var FLUSH_BATCHED_UPDATES = {
initialize: emptyFunction,
close: ReactUpdates.flushBatchedUpdates.bind(ReactUpdates)
};
var TRANSACTION_WRAPPERS = [FLUSH_BATCHED_UPDATES, RESET_BATCHED_UPDATES];
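// Close handlers run in wrapper order, so the flush happens while
// `isBatchingUpdates` is still true; updates enqueued during the flush
// (e.g. from componentDidUpdate) join the same flush instead of starting
// a new transaction.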
function ReactDefaultBatchingStrategyTransaction() {
this.reinitializeTransaction();
}
assign(
ReactDefaultBatchingStrategyTransaction.prototype,
Transaction.Mixin,
{
getTransactionWrappers: function() {
return TRANSACTION_WRAPPERS;
}
}
);
var transaction = new ReactDefaultBatchingStrategyTransaction();
var ReactDefaultBatchingStrategy = {
isBatchingUpdates: false,
/**
* Call the provided function in a context within which calls to `setState`
* and friends are batched such that components aren't updated unnecessarily.
*/
batchedUpdates: function(callback, a, b, c, d) {
var alreadyBatchingUpdates = ReactDefaultBatchingStrategy.isBatchingUpdates;
ReactDefaultBatchingStrategy.isBatchingUpdates = true;
// The code is written this way to avoid extra allocations
if (alreadyBatchingUpdates) {
callback(a, b, c, d);
} else {
transaction.perform(callback, null, a, b, c, d);
}
}
};
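// Illustrative behavior (`inner` is a hypothetical callback): nested calls
// run the callback directly, so
//   ReactDefaultBatchingStrategy.batchedUpdates(function() {
//     ReactDefaultBatchingStrategy.batchedUpdates(inner); // runs inline
//   });
// performs a single flush when the outermost transaction closes.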
module.exports = ReactDefaultBatchingStrategy;
},{"./Object.assign":33,"./ReactUpdates":104,"./Transaction":121,"./emptyFunction":134}],64:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactDefaultInjection
*/
'use strict';
var BeforeInputEventPlugin = require("./BeforeInputEventPlugin");
var ChangeEventPlugin = require("./ChangeEventPlugin");
var ClientReactRootIndex = require("./ClientReactRootIndex");
var DefaultEventPluginOrder = require("./DefaultEventPluginOrder");
var EnterLeaveEventPlugin = require("./EnterLeaveEventPlugin");
var ExecutionEnvironment = require("./ExecutionEnvironment");
var HTMLDOMPropertyConfig = require("./HTMLDOMPropertyConfig");
var MobileSafariClickEventPlugin = require("./MobileSafariClickEventPlugin");
var ReactBrowserComponentMixin = require("./ReactBrowserComponentMixin");
var ReactClass = require("./ReactClass");
var ReactComponentBrowserEnvironment =
require("./ReactComponentBrowserEnvironment");
var ReactDefaultBatchingStrategy = require("./ReactDefaultBatchingStrategy");
var ReactDOMComponent = require("./ReactDOMComponent");
var ReactDOMButton = require("./ReactDOMButton");
var ReactDOMForm = require("./ReactDOMForm");
var ReactDOMImg = require("./ReactDOMImg");
var ReactDOMIDOperations = require("./ReactDOMIDOperations");
var ReactDOMIframe = require("./ReactDOMIframe");
var ReactDOMInput = require("./ReactDOMInput");
var ReactDOMOption = require("./ReactDOMOption");
var ReactDOMSelect = require("./ReactDOMSelect");
var ReactDOMTextarea = require("./ReactDOMTextarea");
var ReactDOMTextComponent = require("./ReactDOMTextComponent");
var ReactElement = require("./ReactElement");
var ReactEventListener = require("./ReactEventListener");
var ReactInjection = require("./ReactInjection");
var ReactInstanceHandles = require("./ReactInstanceHandles");
var ReactMount = require("./ReactMount");
var ReactReconcileTransaction = require("./ReactReconcileTransaction");
var SelectEventPlugin = require("./SelectEventPlugin");
var ServerReactRootIndex = require("./ServerReactRootIndex");
var SimpleEventPlugin = require("./SimpleEventPlugin");
var SVGDOMPropertyConfig = require("./SVGDOMPropertyConfig");
var createFullPageComponent = require("./createFullPageComponent");
function autoGenerateWrapperClass(type) {
return ReactClass.createClass({
tagName: type.toUpperCase(),
render: function() {
return new ReactElement(
type,
null,
null,
null,
null,
this.props
);
}
});
}
function inject() {
ReactInjection.EventEmitter.injectReactEventListener(
ReactEventListener
);
/**
* Inject modules for resolving DOM hierarchy and plugin ordering.
*/
ReactInjection.EventPluginHub.injectEventPluginOrder(DefaultEventPluginOrder);
ReactInjection.EventPluginHub.injectInstanceHandle(ReactInstanceHandles);
ReactInjection.EventPluginHub.injectMount(ReactMount);
/**
* Some important event plugins included by default (without having to require
* them).
*/
ReactInjection.EventPluginHub.injectEventPluginsByName({
SimpleEventPlugin: SimpleEventPlugin,
EnterLeaveEventPlugin: EnterLeaveEventPlugin,
ChangeEventPlugin: ChangeEventPlugin,
MobileSafariClickEventPlugin: MobileSafariClickEventPlugin,
SelectEventPlugin: SelectEventPlugin,
BeforeInputEventPlugin: BeforeInputEventPlugin
});
ReactInjection.NativeComponent.injectGenericComponentClass(
ReactDOMComponent
);
ReactInjection.NativeComponent.injectTextComponentClass(
ReactDOMTextComponent
);
ReactInjection.NativeComponent.injectAutoWrapper(
autoGenerateWrapperClass
);
// This needs to happen before createFullPageComponent() otherwise the mixin
// won't be included.
ReactInjection.Class.injectMixin(ReactBrowserComponentMixin);
ReactInjection.NativeComponent.injectComponentClasses({
'button': ReactDOMButton,
'form': ReactDOMForm,
'iframe': ReactDOMIframe,
'img': ReactDOMImg,
'input': ReactDOMInput,
'option': ReactDOMOption,
'select': ReactDOMSelect,
'textarea': ReactDOMTextarea,
'html': createFullPageComponent('html'),
'head': createFullPageComponent('head'),
'body': createFullPageComponent('body')
});
ReactInjection.DOMProperty.injectDOMPropertyConfig(HTMLDOMPropertyConfig);
ReactInjection.DOMProperty.injectDOMPropertyConfig(SVGDOMPropertyConfig);
ReactInjection.EmptyComponent.injectEmptyComponent('noscript');
ReactInjection.Updates.injectReconcileTransaction(
ReactReconcileTransaction
);
ReactInjection.Updates.injectBatchingStrategy(
ReactDefaultBatchingStrategy
);
ReactInjection.RootIndex.injectCreateReactRootIndex(
ExecutionEnvironment.canUseDOM ?
ClientReactRootIndex.createReactRootIndex :
ServerReactRootIndex.createReactRootIndex
);
ReactInjection.Component.injectEnvironment(ReactComponentBrowserEnvironment);
ReactInjection.DOMComponent.injectIDOperations(ReactDOMIDOperations);
if ("production" !== "production") {
var url = (ExecutionEnvironment.canUseDOM && window.location.href) || '';
if ((/[?&]react_perf\b/).test(url)) {
var ReactDefaultPerf = require("./ReactDefaultPerf");
ReactDefaultPerf.start();
}
}
}
module.exports = {
inject: inject
};
},{"./BeforeInputEventPlugin":7,"./ChangeEventPlugin":12,"./ClientReactRootIndex":13,"./DefaultEventPluginOrder":18,"./EnterLeaveEventPlugin":19,"./ExecutionEnvironment":26,"./HTMLDOMPropertyConfig":28,"./MobileSafariClickEventPlugin":32,"./ReactBrowserComponentMixin":36,"./ReactClass":42,"./ReactComponentBrowserEnvironment":44,"./ReactDOMButton":51,"./ReactDOMComponent":52,"./ReactDOMForm":53,"./ReactDOMIDOperations":54,"./ReactDOMIframe":55,"./ReactDOMImg":56,"./ReactDOMInput":57,"./ReactDOMOption":58,"./ReactDOMSelect":59,"./ReactDOMTextComponent":61,"./ReactDOMTextarea":62,"./ReactDefaultBatchingStrategy":63,"./ReactDefaultPerf":65,"./ReactElement":67,"./ReactEventListener":72,"./ReactInjection":74,"./ReactInstanceHandles":76,"./ReactMount":81,"./ReactReconcileTransaction":92,"./SVGDOMPropertyConfig":106,"./SelectEventPlugin":107,"./ServerReactRootIndex":108,"./SimpleEventPlugin":109,"./createFullPageComponent":130}],65:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactDefaultPerf
* @typechecks static-only
*/
'use strict';
var DOMProperty = require("./DOMProperty");
var ReactDefaultPerfAnalysis = require("./ReactDefaultPerfAnalysis");
var ReactMount = require("./ReactMount");
var ReactPerf = require("./ReactPerf");
var performanceNow = require("./performanceNow");
function roundFloat(val) {
return Math.floor(val * 100) / 100;
}
function addValue(obj, key, val) {
obj[key] = (obj[key] || 0) + val;
}
var ReactDefaultPerf = {
_allMeasurements: [], // last item in the list is the current one
_mountStack: [0],
_injected: false,
start: function() {
if (!ReactDefaultPerf._injected) {
ReactPerf.injection.injectMeasure(ReactDefaultPerf.measure);
}
ReactDefaultPerf._allMeasurements.length = 0;
ReactPerf.enableMeasure = true;
},
stop: function() {
ReactPerf.enableMeasure = false;
},
getLastMeasurements: function() {
return ReactDefaultPerf._allMeasurements;
},
printExclusive: function(measurements) {
measurements = measurements || ReactDefaultPerf._allMeasurements;
var summary = ReactDefaultPerfAnalysis.getExclusiveSummary(measurements);
console.table(summary.map(function(item) {
return {
'Component class name': item.componentName,
'Total inclusive time (ms)': roundFloat(item.inclusive),
'Exclusive mount time (ms)': roundFloat(item.exclusive),
'Exclusive render time (ms)': roundFloat(item.render),
'Mount time per instance (ms)': roundFloat(item.exclusive / item.count),
'Render time per instance (ms)': roundFloat(item.render / item.count),
'Instances': item.count
};
}));
// TODO: ReactDefaultPerfAnalysis.getTotalTime() does not return the correct
// number.
},
printInclusive: function(measurements) {
measurements = measurements || ReactDefaultPerf._allMeasurements;
var summary = ReactDefaultPerfAnalysis.getInclusiveSummary(measurements);
console.table(summary.map(function(item) {
return {
'Owner > component': item.componentName,
'Inclusive time (ms)': roundFloat(item.time),
'Instances': item.count
};
}));
console.log(
'Total time:',
ReactDefaultPerfAnalysis.getTotalTime(measurements).toFixed(2) + ' ms'
);
},
getMeasurementsSummaryMap: function(measurements) {
var summary = ReactDefaultPerfAnalysis.getInclusiveSummary(
measurements,
true
);
return summary.map(function(item) {
return {
'Owner > component': item.componentName,
'Wasted time (ms)': item.time,
'Instances': item.count
};
});
},
printWasted: function(measurements) {
measurements = measurements || ReactDefaultPerf._allMeasurements;
console.table(ReactDefaultPerf.getMeasurementsSummaryMap(measurements));
console.log(
'Total time:',
ReactDefaultPerfAnalysis.getTotalTime(measurements).toFixed(2) + ' ms'
);
},
printDOM: function(measurements) {
measurements = measurements || ReactDefaultPerf._allMeasurements;
var summary = ReactDefaultPerfAnalysis.getDOMSummary(measurements);
console.table(summary.map(function(item) {
var result = {};
result[DOMProperty.ID_ATTRIBUTE_NAME] = item.id;
result['type'] = item.type;
result['args'] = JSON.stringify(item.args);
return result;
}));
console.log(
'Total time:',
ReactDefaultPerfAnalysis.getTotalTime(measurements).toFixed(2) + ' ms'
);
},
_recordWrite: function(id, fnName, totalTime, args) {
// TODO: totalTime isn't that useful since it doesn't count paints/reflows
var writes =
ReactDefaultPerf
._allMeasurements[ReactDefaultPerf._allMeasurements.length - 1]
.writes;
writes[id] = writes[id] || [];
writes[id].push({
type: fnName,
time: totalTime,
args: args
});
},
measure: function(moduleName, fnName, func) {
    return function() {
      for (var args = [], $__0 = 0, $__1 = arguments.length; $__0 < $__1; $__0++) {
        args.push(arguments[$__0]);
      }
var totalTime;
var rv;
var start;
if (fnName === '_renderNewRootComponent' ||
fnName === 'flushBatchedUpdates') {
// A "measurement" is a set of metrics recorded for each flush. We want
// to group the metrics for a given flush together so we can look at the
// components that rendered and the DOM operations that actually
// happened to determine the amount of "wasted work" performed.
ReactDefaultPerf._allMeasurements.push({
exclusive: {},
inclusive: {},
render: {},
counts: {},
writes: {},
displayNames: {},
totalTime: 0
});
start = performanceNow();
rv = func.apply(this, args);
ReactDefaultPerf._allMeasurements[
ReactDefaultPerf._allMeasurements.length - 1
].totalTime = performanceNow() - start;
return rv;
} else if (fnName === '_mountImageIntoNode' ||
moduleName === 'ReactDOMIDOperations') {
start = performanceNow();
rv = func.apply(this, args);
totalTime = performanceNow() - start;
if (fnName === '_mountImageIntoNode') {
var mountID = ReactMount.getID(args[1]);
ReactDefaultPerf._recordWrite(mountID, fnName, totalTime, args[0]);
} else if (fnName === 'dangerouslyProcessChildrenUpdates') {
// special format
args[0].forEach(function(update) {
var writeArgs = {};
if (update.fromIndex !== null) {
writeArgs.fromIndex = update.fromIndex;
}
if (update.toIndex !== null) {
writeArgs.toIndex = update.toIndex;
}
if (update.textContent !== null) {
writeArgs.textContent = update.textContent;
}
if (update.markupIndex !== null) {
writeArgs.markup = args[1][update.markupIndex];
}
ReactDefaultPerf._recordWrite(
update.parentID,
update.type,
totalTime,
writeArgs
);
});
} else {
// basic format
ReactDefaultPerf._recordWrite(
args[0],
fnName,
totalTime,
Array.prototype.slice.call(args, 1)
);
}
return rv;
      } else if (moduleName === 'ReactCompositeComponent' && (
          // TODO: receiveComponent()?
          fnName === 'mountComponent' ||
          fnName === 'updateComponent' ||
          fnName === '_renderValidatedComponent')) {
if (typeof this._currentElement.type === 'string') {
return func.apply(this, args);
}
var rootNodeID = fnName === 'mountComponent' ?
args[0] :
this._rootNodeID;
var isRender = fnName === '_renderValidatedComponent';
var isMount = fnName === 'mountComponent';
var mountStack = ReactDefaultPerf._mountStack;
var entry = ReactDefaultPerf._allMeasurements[
ReactDefaultPerf._allMeasurements.length - 1
];
if (isRender) {
addValue(entry.counts, rootNodeID, 1);
} else if (isMount) {
mountStack.push(0);
}
start = performanceNow();
rv = func.apply(this, args);
totalTime = performanceNow() - start;
if (isRender) {
addValue(entry.render, rootNodeID, totalTime);
} else if (isMount) {
var subMountTime = mountStack.pop();
mountStack[mountStack.length - 1] += totalTime;
addValue(entry.exclusive, rootNodeID, totalTime - subMountTime);
addValue(entry.inclusive, rootNodeID, totalTime);
} else {
addValue(entry.inclusive, rootNodeID, totalTime);
}
entry.displayNames[rootNodeID] = {
current: this.getName(),
owner: this._currentElement._owner ?
this._currentElement._owner.getName() :
'<root>'
};
return rv;
} else {
return func.apply(this, args);
}
};
}
};
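// Typical profiling session (this object is exposed as React.addons.Perf in
// builds that include the addons):
//   ReactDefaultPerf.start();
//   // ... interact with the app ...
//   ReactDefaultPerf.stop();
//   ReactDefaultPerf.printWasted(); // or printInclusive() / printExclusive() / printDOM()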
module.exports = ReactDefaultPerf;
},{"./DOMProperty":15,"./ReactDefaultPerfAnalysis":66,"./ReactMount":81,"./ReactPerf":86,"./performanceNow":167}],66:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactDefaultPerfAnalysis
*/
var assign = require("./Object.assign");
// Don't try to save users less than 1.2ms (a number I made up)
var DONT_CARE_THRESHOLD = 1.2;
var DOM_OPERATION_TYPES = {
'_mountImageIntoNode': 'set innerHTML',
INSERT_MARKUP: 'set innerHTML',
MOVE_EXISTING: 'move',
REMOVE_NODE: 'remove',
TEXT_CONTENT: 'set textContent',
'updatePropertyByID': 'update attribute',
'deletePropertyByID': 'delete attribute',
'updateStylesByID': 'update styles',
'updateInnerHTMLByID': 'set innerHTML',
'dangerouslyReplaceNodeWithMarkupByID': 'replace'
};
function getTotalTime(measurements) {
// TODO: return number of DOM ops? could be misleading.
// TODO: measure dropped frames after reconcile?
// TODO: log total time of each reconcile and the top-level component
// class that triggered it.
var totalTime = 0;
for (var i = 0; i < measurements.length; i++) {
var measurement = measurements[i];
totalTime += measurement.totalTime;
}
return totalTime;
}
function getDOMSummary(measurements) {
var items = [];
for (var i = 0; i < measurements.length; i++) {
var measurement = measurements[i];
var id;
for (id in measurement.writes) {
measurement.writes[id].forEach(function(write) {
items.push({
id: id,
type: DOM_OPERATION_TYPES[write.type] || write.type,
args: write.args
});
});
}
}
return items;
}
function getExclusiveSummary(measurements) {
var candidates = {};
var displayName;
for (var i = 0; i < measurements.length; i++) {
var measurement = measurements[i];
var allIDs = assign(
{},
measurement.exclusive,
measurement.inclusive
);
for (var id in allIDs) {
displayName = measurement.displayNames[id].current;
candidates[displayName] = candidates[displayName] || {
componentName: displayName,
inclusive: 0,
exclusive: 0,
render: 0,
count: 0
};
if (measurement.render[id]) {
candidates[displayName].render += measurement.render[id];
}
if (measurement.exclusive[id]) {
candidates[displayName].exclusive += measurement.exclusive[id];
}
if (measurement.inclusive[id]) {
candidates[displayName].inclusive += measurement.inclusive[id];
}
if (measurement.counts[id]) {
candidates[displayName].count += measurement.counts[id];
}
}
}
// Now make a sorted array with the results.
var arr = [];
for (displayName in candidates) {
if (candidates[displayName].exclusive >= DONT_CARE_THRESHOLD) {
arr.push(candidates[displayName]);
}
}
arr.sort(function(a, b) {
return b.exclusive - a.exclusive;
});
return arr;
}
function getInclusiveSummary(measurements, onlyClean) {
var candidates = {};
var inclusiveKey;
for (var i = 0; i < measurements.length; i++) {
var measurement = measurements[i];
var allIDs = assign(
{},
measurement.exclusive,
measurement.inclusive
);
var cleanComponents;
if (onlyClean) {
cleanComponents = getUnchangedComponents(measurement);
}
for (var id in allIDs) {
if (onlyClean && !cleanComponents[id]) {
continue;
}
var displayName = measurement.displayNames[id];
// Inclusive time is not useful for many components without knowing where
// they are instantiated. So we aggregate inclusive time with both the
// owner and current displayName as the key.
inclusiveKey = displayName.owner + ' > ' + displayName.current;
candidates[inclusiveKey] = candidates[inclusiveKey] || {
componentName: inclusiveKey,
time: 0,
count: 0
};
if (measurement.inclusive[id]) {
candidates[inclusiveKey].time += measurement.inclusive[id];
}
if (measurement.counts[id]) {
candidates[inclusiveKey].count += measurement.counts[id];
}
}
}
// Now make a sorted array with the results.
var arr = [];
for (inclusiveKey in candidates) {
if (candidates[inclusiveKey].time >= DONT_CARE_THRESHOLD) {
arr.push(candidates[inclusiveKey]);
}
}
arr.sort(function(a, b) {
return b.time - a.time;
});
return arr;
}
function getUnchangedComponents(measurement) {
// For a given reconcile, look at which components did not actually
// render anything to the DOM and return a mapping of their ID to
// the amount of time it took to render the entire subtree.
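  // React IDs are dot-separated paths (e.g. ".0.1" is the second child of the
  // first child of the root), so `dirtyLeafIDs[i].indexOf(id) === 0` below is
  // an ancestor test: it matches when the dirty leaf lives in `id`'s subtree
  // (including `id` itself).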
var cleanComponents = {};
var dirtyLeafIDs = Object.keys(measurement.writes);
var allIDs = assign({}, measurement.exclusive, measurement.inclusive);
for (var id in allIDs) {
var isDirty = false;
// For each component that rendered, see if a component that triggered
// a DOM op is in its subtree.
for (var i = 0; i < dirtyLeafIDs.length; i++) {
if (dirtyLeafIDs[i].indexOf(id) === 0) {
isDirty = true;
break;
}
}
if (!isDirty && measurement.counts[id] > 0) {
cleanComponents[id] = true;
}
}
return cleanComponents;
}
var ReactDefaultPerfAnalysis = {
getExclusiveSummary: getExclusiveSummary,
getInclusiveSummary: getInclusiveSummary,
getDOMSummary: getDOMSummary,
getTotalTime: getTotalTime
};
module.exports = ReactDefaultPerfAnalysis;
},{"./Object.assign":33}],67:[function(require,module,exports){
/**
* Copyright 2014-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactElement
*/
'use strict';
var ReactContext = require("./ReactContext");
var ReactCurrentOwner = require("./ReactCurrentOwner");
var assign = require("./Object.assign");
var warning = require("./warning");
var RESERVED_PROPS = {
key: true,
ref: true
};
/**
* Warn for mutations.
*
* @internal
* @param {object} object
* @param {string} key
*/
function defineWarningProperty(object, key) {
Object.defineProperty(object, key, {
configurable: false,
enumerable: true,
get: function() {
if (!this._store) {
return null;
}
return this._store[key];
},
set: function(value) {
("production" !== "production" ? warning(
false,
'Don\'t set the %s property of the React element. Instead, ' +
'specify the correct value when initially creating the element.',
key
) : null);
this._store[key] = value;
}
});
}
/**
* This is updated to true if the membrane is successfully created.
*/
var useMutationMembrane = false;
/**
* Warn for mutations.
*
* @internal
* @param {object} element
*/
function defineMutationMembrane(prototype) {
try {
var pseudoFrozenProperties = {
props: true
};
for (var key in pseudoFrozenProperties) {
defineWarningProperty(prototype, key);
}
useMutationMembrane = true;
} catch (x) {
// IE will fail on defineProperty
}
}
/**
* Base constructor for all React elements. This is only used to make this
* work with a dynamic instanceof check. Nothing should live on this prototype.
*
* @param {*} type
* @param {string|object} ref
* @param {*} key
* @param {*} props
* @internal
*/
var ReactElement = function(type, key, ref, owner, context, props) {
// Built-in properties that belong on the element
this.type = type;
this.key = key;
this.ref = ref;
// Record the component responsible for creating this element.
this._owner = owner;
// TODO: Deprecate withContext, and then the context becomes accessible
// through the owner.
this._context = context;
if ("production" !== "production") {
// The validation flag and props are currently mutative. We put them on
// an external backing store so that we can freeze the whole object.
// This can be replaced with a WeakMap once they are implemented in
// commonly used development environments.
this._store = {props: props, originalProps: assign({}, props)};
// To make comparing ReactElements easier for testing purposes, we make
// the validation flag non-enumerable (where possible, which should
// include every environment we run tests in), so the test framework
// ignores it.
try {
Object.defineProperty(this._store, 'validated', {
configurable: false,
enumerable: false,
writable: true
});
} catch (x) {
}
this._store.validated = false;
// We're not allowed to set props directly on the object so we early
// return and rely on the prototype membrane to forward to the backing
// store.
if (useMutationMembrane) {
Object.freeze(this);
return;
}
}
this.props = props;
};
// We intentionally don't expose the function on the constructor property.
// ReactElement should be indistinguishable from a plain object.
ReactElement.prototype = {
_isReactElement: true
};
if ("production" !== "production") {
defineMutationMembrane(ReactElement.prototype);
}
ReactElement.createElement = function(type, config, children) {
var propName;
// Reserved names are extracted
var props = {};
var key = null;
var ref = null;
if (config != null) {
ref = config.ref === undefined ? null : config.ref;
key = config.key === undefined ? null : '' + config.key;
// Remaining properties are added to a new props object
for (propName in config) {
if (config.hasOwnProperty(propName) &&
!RESERVED_PROPS.hasOwnProperty(propName)) {
props[propName] = config[propName];
}
}
}
// Children can be more than one argument, and those are transferred onto
// the newly allocated props object.
var childrenLength = arguments.length - 2;
if (childrenLength === 1) {
props.children = children;
} else if (childrenLength > 1) {
var childArray = Array(childrenLength);
for (var i = 0; i < childrenLength; i++) {
childArray[i] = arguments[i + 2];
}
props.children = childArray;
}
// Resolve default props
if (type && type.defaultProps) {
var defaultProps = type.defaultProps;
for (propName in defaultProps) {
if (typeof props[propName] === 'undefined') {
props[propName] = defaultProps[propName];
}
}
}
return new ReactElement(
type,
key,
ref,
ReactCurrentOwner.current,
ReactContext.current,
props
);
};
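// Illustrative call (assuming a component class `Foo` and hypothetical
// children `childA`/`childB`):
//   ReactElement.createElement(Foo, {bar: 1, key: 'a'}, childA, childB)
// returns an element with type === Foo, key === 'a', and props of
// {bar: 1, children: [childA, childB]} (after defaultProps are merged in).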
ReactElement.createFactory = function(type) {
var factory = ReactElement.createElement.bind(null, type);
// Expose the type on the factory and the prototype so that it can be
// easily accessed on elements. E.g. <Foo />.type === Foo.type.
// This should not be named `constructor` since this may not be the function
// that created the element, and it may not even be a constructor.
// Legacy hook TODO: Warn if this is accessed
factory.type = type;
return factory;
};
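// This is how the tag factories earlier in this bundle are built, e.g.
//   var input = ReactElement.createFactory('input');
//   input({type: 'text'}) // equivalent to createElement('input', {type: 'text'})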
ReactElement.cloneAndReplaceProps = function(oldElement, newProps) {
var newElement = new ReactElement(
oldElement.type,
oldElement.key,
oldElement.ref,
oldElement._owner,
oldElement._context,
newProps
);
if ("production" !== "production") {
// If the key on the original is valid, then the clone is valid
newElement._store.validated = oldElement._store.validated;
}
return newElement;
};
ReactElement.cloneElement = function(element, config, children) {
var propName;
// Original props are copied
var props = assign({}, element.props);
// Reserved names are extracted
var key = element.key;
var ref = element.ref;
// Owner will be preserved, unless ref is overridden
var owner = element._owner;
if (config != null) {
if (config.ref !== undefined) {
// Silently steal the ref from the parent.
ref = config.ref;
owner = ReactCurrentOwner.current;
}
if (config.key !== undefined) {
key = '' + config.key;
}
// Remaining properties override existing props
for (propName in config) {
if (config.hasOwnProperty(propName) &&
!RESERVED_PROPS.hasOwnProperty(propName)) {
props[propName] = config[propName];
}
}
}
// Children can be more than one argument, and those are transferred onto
// the newly allocated props object.
var childrenLength = arguments.length - 2;
if (childrenLength === 1) {
props.children = children;
} else if (childrenLength > 1) {
var childArray = Array(childrenLength);
for (var i = 0; i < childrenLength; i++) {
childArray[i] = arguments[i + 2];
}
props.children = childArray;
}
return new ReactElement(
element.type,
key,
ref,
owner,
element._context,
props
);
};
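// Illustrative call: cloneElement(el, {className: 'x'}) copies el.props,
// overrides className, and preserves el.key and el.ref unless `key`/`ref`
// appear in the config (overriding `ref` also reassigns the owner).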
/**
* @param {?object} object
* @return {boolean} True if `object` is a valid component.
* @final
*/
ReactElement.isValidElement = function(object) {
  // ReactTestUtils is often used outside of beforeEach, whereas React is
  // within it. This leads to two different instances of React on the same
  // page. To identify an element from a different React instance we use
  // a flag instead of an instanceof check.
var isElement = !!(object && object._isReactElement);
// if (isElement && !(object instanceof ReactElement)) {
// This is an indicator that you're using multiple versions of React at the
// same time. This will screw with ownership and stuff. Fix it, please.
// TODO: We could possibly warn here.
// }
return isElement;
};
module.exports = ReactElement;
},{"./Object.assign":33,"./ReactContext":48,"./ReactCurrentOwner":49,"./warning":176}],68:[function(require,module,exports){
/**
* Copyright 2014-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactElementValidator
*/
/**
 * ReactElementValidator provides a wrapper around an element factory
* which validates the props passed to the element. This is intended to be
* used only in DEV and could be replaced by a static type checker for languages
* that support it.
*/
'use strict';
var ReactElement = require("./ReactElement");
var ReactFragment = require("./ReactFragment");
var ReactPropTypeLocations = require("./ReactPropTypeLocations");
var ReactPropTypeLocationNames = require("./ReactPropTypeLocationNames");
var ReactCurrentOwner = require("./ReactCurrentOwner");
var ReactNativeComponent = require("./ReactNativeComponent");
var getIteratorFn = require("./getIteratorFn");
var invariant = require("./invariant");
var warning = require("./warning");
function getDeclarationErrorAddendum() {
if (ReactCurrentOwner.current) {
var name = ReactCurrentOwner.current.getName();
if (name) {
return ' Check the render method of `' + name + '`.';
}
}
return '';
}
/**
 * Warn if there's no key explicitly set on dynamic arrays of children or if
 * object keys are not valid. This allows us to keep track of children between
* updates.
*/
var ownerHasKeyUseWarning = {};
var loggedTypeFailures = {};
var NUMERIC_PROPERTY_REGEX = /^\d+$/;
/**
* Gets the instance's name for use in warnings.
*
* @internal
* @return {?string} Display name or undefined
*/
function getName(instance) {
var publicInstance = instance && instance.getPublicInstance();
if (!publicInstance) {
return undefined;
}
var constructor = publicInstance.constructor;
if (!constructor) {
return undefined;
}
return constructor.displayName || constructor.name || undefined;
}
/**
* Gets the current owner's displayName for use in warnings.
*
* @internal
* @return {?string} Display name or undefined
*/
function getCurrentOwnerDisplayName() {
var current = ReactCurrentOwner.current;
return (
current && getName(current) || undefined
);
}
/**
* Warn if the element doesn't have an explicit key assigned to it.
* This element is in an array. The array could grow and shrink or be
* reordered. All children that haven't already been validated are required to
 * have a "key" property assigned to them.
*
* @internal
* @param {ReactElement} element Element that requires a key.
* @param {*} parentType element's parent's type.
*/
function validateExplicitKey(element, parentType) {
if (element._store.validated || element.key != null) {
return;
}
element._store.validated = true;
warnAndMonitorForKeyUse(
'Each child in an array or iterator should have a unique "key" prop.',
element,
parentType
);
}
/**
* Warn if the key is being defined as an object property but has an incorrect
* value.
*
* @internal
* @param {string} name Property name of the key.
* @param {ReactElement} element Component that requires a key.
* @param {*} parentType element's parent's type.
*/
function validatePropertyKey(name, element, parentType) {
if (!NUMERIC_PROPERTY_REGEX.test(name)) {
return;
}
warnAndMonitorForKeyUse(
'Child objects should have non-numeric keys so ordering is preserved.',
element,
parentType
);
}
/**
* Shared warning and monitoring code for the key warnings.
*
* @internal
* @param {string} message The base warning that gets output.
* @param {ReactElement} element Component that requires a key.
* @param {*} parentType element's parent's type.
*/
function warnAndMonitorForKeyUse(message, element, parentType) {
var ownerName = getCurrentOwnerDisplayName();
var parentName = typeof parentType === 'string' ?
parentType : parentType.displayName || parentType.name;
var useName = ownerName || parentName;
var memoizer = ownerHasKeyUseWarning[message] || (
(ownerHasKeyUseWarning[message] = {})
);
if (memoizer.hasOwnProperty(useName)) {
return;
}
memoizer[useName] = true;
var parentOrOwnerAddendum =
ownerName ? (" Check the render method of " + ownerName + ".") :
parentName ? (" Check the React.render call using <" + parentName + ">.") :
'';
// Usually the current owner is the offender, but if it accepts children as a
// property, it may be the creator of the child that's responsible for
// assigning it a key.
var childOwnerAddendum = '';
if (element &&
element._owner &&
element._owner !== ReactCurrentOwner.current) {
// Name of the component that originally created this child.
var childOwnerName = getName(element._owner);
childOwnerAddendum = (" It was passed a child from " + childOwnerName + ".");
}
("production" !== "production" ? warning(
false,
message + '%s%s See https://fb.me/react-warning-keys for more information.',
parentOrOwnerAddendum,
childOwnerAddendum
) : null);
}
/**
 * Ensure that every element either is passed in a static location, in an
 * array with an explicit key property defined, or in an object literal
 * with a valid key property.
*
* @internal
* @param {ReactNode} node Statically passed child of any type.
* @param {*} parentType node's parent's type.
*/
function validateChildKeys(node, parentType) {
if (Array.isArray(node)) {
for (var i = 0; i < node.length; i++) {
var child = node[i];
if (ReactElement.isValidElement(child)) {
validateExplicitKey(child, parentType);
}
}
} else if (ReactElement.isValidElement(node)) {
// This element was passed in a valid location.
node._store.validated = true;
} else if (node) {
var iteratorFn = getIteratorFn(node);
// Entry iterators provide implicit keys.
if (iteratorFn) {
if (iteratorFn !== node.entries) {
var iterator = iteratorFn.call(node);
var step;
while (!(step = iterator.next()).done) {
if (ReactElement.isValidElement(step.value)) {
validateExplicitKey(step.value, parentType);
}
}
}
} else if (typeof node === 'object') {
var fragment = ReactFragment.extractIfFragment(node);
for (var key in fragment) {
if (fragment.hasOwnProperty(key)) {
validatePropertyKey(key, fragment[key], parentType);
}
}
}
}
}
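// Sketch of what trips the validation above (children are illustrative, and
// this assumes a development build, where elements carry a `_store`):
//
//   var items = [
//     ReactElement.createElement('li', null, 'a'),   // warns: missing "key"
//     ReactElement.createElement('li', {key: 'b'})   // ok: explicit key
//   ];
//   validateChildKeys(items, 'ul');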
/**
* Assert that the props are valid
*
* @param {string} componentName Name of the component for error messages.
* @param {object} propTypes Map of prop name to a ReactPropType
* @param {object} props
* @param {string} location e.g. "prop", "context", "child context"
* @private
*/
function checkPropTypes(componentName, propTypes, props, location) {
for (var propName in propTypes) {
if (propTypes.hasOwnProperty(propName)) {
var error;
// Prop type validation may throw. In case they do, we don't want to
// fail the render phase where it didn't fail before. So we log it.
// After these have been cleaned up, we'll let them throw.
try {
// This is intentionally an invariant that gets caught. It's the same
// behavior as without this statement except with a better message.
("production" !== "production" ? invariant(
typeof propTypes[propName] === 'function',
'%s: %s type `%s` is invalid; it must be a function, usually from ' +
'React.PropTypes.',
componentName || 'React class',
ReactPropTypeLocationNames[location],
propName
) : invariant(typeof propTypes[propName] === 'function'));
error = propTypes[propName](props, propName, componentName, location);
} catch (ex) {
error = ex;
}
if (error instanceof Error && !(error.message in loggedTypeFailures)) {
// Only monitor this failure once because there tends to be a lot of the
// same error.
loggedTypeFailures[error.message] = true;
var addendum = getDeclarationErrorAddendum(this);
("production" !== "production" ? warning(false, 'Failed propType: %s%s', error.message, addendum) : null);
}
}
}
}
var warnedPropsMutations = {};
/**
* Warn about mutating props when setting `propName` on `element`.
*
* @param {string} propName The string key within props that was set
* @param {ReactElement} element
*/
function warnForPropsMutation(propName, element) {
var type = element.type;
var elementName = typeof type === 'string' ? type : type.displayName;
var ownerName = element._owner ?
element._owner.getPublicInstance().constructor.displayName : null;
var warningKey = propName + '|' + elementName + '|' + ownerName;
if (warnedPropsMutations.hasOwnProperty(warningKey)) {
return;
}
warnedPropsMutations[warningKey] = true;
var elementInfo = '';
if (elementName) {
elementInfo = ' <' + elementName + ' />';
}
var ownerInfo = '';
if (ownerName) {
ownerInfo = ' The element was created by ' + ownerName + '.';
}
("production" !== "production" ? warning(
false,
'Don\'t set .props.%s of the React component%s. Instead, specify the ' +
'correct value when initially creating the element or use ' +
'React.cloneElement to make a new element with updated props.%s',
propName,
elementInfo,
ownerInfo
) : null);
}
// Inline Object.is polyfill
function is(a, b) {
if (a !== a) {
// NaN
return b !== b;
}
if (a === 0 && b === 0) {
// +-0
return 1 / a === 1 / b;
}
return a === b;
}
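// The polyfill above matches Object.is semantics rather than `===` (worked
// examples):
//
//   is(NaN, NaN);  // true  -- NaN is equal to itself here
//   is(0, -0);     // false -- 1/0 (Infinity) !== 1/-0 (-Infinity)
//   is(0, 0);      // true
//   is('a', 'a');  // true, same as ===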
/**
* Given an element, check if its props have been mutated since element
* creation (or the last call to this function). In particular, check if any
* new props have been added, which we can't directly catch by defining warning
* properties on the props object.
*
* @param {ReactElement} element
*/
function checkAndWarnForMutatedProps(element) {
if (!element._store) {
// Element was created using `new ReactElement` directly or with
// `ReactElement.createElement`; skip mutation checking
return;
}
var originalProps = element._store.originalProps;
var props = element.props;
for (var propName in props) {
if (props.hasOwnProperty(propName)) {
if (!originalProps.hasOwnProperty(propName) ||
!is(originalProps[propName], props[propName])) {
warnForPropsMutation(propName, element);
// Copy over the new value so that the two props objects match again
originalProps[propName] = props[propName];
}
}
}
}
/**
* Given an element, validate that its props follow the propTypes definition,
* provided by the type.
*
* @param {ReactElement} element
*/
function validatePropTypes(element) {
if (element.type == null) {
// This has already warned. Don't throw.
return;
}
// Extract the component class from the element. Converts string types
// to a composite class which may have propTypes.
// TODO: Validating a string's propTypes is not decoupled from the
// rendering target which is problematic.
var componentClass = ReactNativeComponent.getComponentClassForElement(
element
);
var name = componentClass.displayName || componentClass.name;
if (componentClass.propTypes) {
checkPropTypes(
name,
componentClass.propTypes,
element.props,
ReactPropTypeLocations.prop
);
}
if (typeof componentClass.getDefaultProps === 'function') {
("production" !== "production" ? warning(
componentClass.getDefaultProps.isReactClassApproved,
'getDefaultProps is only used on classic React.createClass ' +
'definitions. Use a static property named `defaultProps` instead.'
) : null);
}
}
var ReactElementValidator = {
checkAndWarnForMutatedProps: checkAndWarnForMutatedProps,
createElement: function(type, props, children) {
// We warn in this case but don't throw. We expect the element creation to
// succeed and there will likely be errors in render.
("production" !== "production" ? warning(
type != null,
'React.createElement: type should not be null or undefined. It should ' +
'be a string (for DOM elements) or a ReactClass (for composite ' +
'components).'
) : null);
var element = ReactElement.createElement.apply(this, arguments);
// The result can be nullish if a mock or a custom function is used.
// TODO: Drop this when these are no longer allowed as the type argument.
if (element == null) {
return element;
}
for (var i = 2; i < arguments.length; i++) {
validateChildKeys(arguments[i], type);
}
validatePropTypes(element);
return element;
},
createFactory: function(type) {
var validatedFactory = ReactElementValidator.createElement.bind(
null,
type
);
// Legacy hook TODO: Warn if this is accessed
validatedFactory.type = type;
if ("production" !== "production") {
try {
Object.defineProperty(
validatedFactory,
'type',
{
enumerable: false,
get: function() {
("production" !== "production" ? warning(
false,
'Factory.type is deprecated. Access the class directly ' +
'before passing it to createFactory.'
) : null);
Object.defineProperty(this, 'type', {
value: type
});
return type;
}
}
);
} catch (x) {
// IE will fail on defineProperty (es5-shim/sham too)
}
}
return validatedFactory;
},
cloneElement: function(element, props, children) {
var newElement = ReactElement.cloneElement.apply(this, arguments);
for (var i = 2; i < arguments.length; i++) {
validateChildKeys(arguments[i], newElement.type);
}
validatePropTypes(newElement);
return newElement;
}
};
module.exports = ReactElementValidator;
},{"./ReactCurrentOwner":49,"./ReactElement":67,"./ReactFragment":73,"./ReactNativeComponent":84,"./ReactPropTypeLocationNames":88,"./ReactPropTypeLocations":89,"./getIteratorFn":146,"./invariant":155,"./warning":176}],69:[function(require,module,exports){
/**
* Copyright 2014-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactEmptyComponent
*/
'use strict';
var ReactElement = require("./ReactElement");
var ReactInstanceMap = require("./ReactInstanceMap");
var invariant = require("./invariant");
var component;
// This registry keeps track of the React IDs of the components that rendered to
// `null` (in reality a placeholder such as `noscript`)
var nullComponentIDsRegistry = {};
var ReactEmptyComponentInjection = {
injectEmptyComponent: function(emptyComponent) {
component = ReactElement.createFactory(emptyComponent);
}
};
var ReactEmptyComponentType = function() {};
ReactEmptyComponentType.prototype.componentDidMount = function() {
var internalInstance = ReactInstanceMap.get(this);
// TODO: Make sure we run these methods in the correct order, we shouldn't
// need this check. We're going to assume if we're here it means we ran
// componentWillUnmount already so there is no internal instance (it gets
// removed as part of the unmounting process).
if (!internalInstance) {
return;
}
registerNullComponentID(internalInstance._rootNodeID);
};
ReactEmptyComponentType.prototype.componentWillUnmount = function() {
var internalInstance = ReactInstanceMap.get(this);
// TODO: Get rid of this check. See TODO in componentDidMount.
if (!internalInstance) {
return;
}
deregisterNullComponentID(internalInstance._rootNodeID);
};
ReactEmptyComponentType.prototype.render = function() {
("production" !== "production" ? invariant(
component,
'Trying to return null from a render, but no null placeholder component ' +
'was injected.'
) : invariant(component));
return component();
};
var emptyElement = ReactElement.createElement(ReactEmptyComponentType);
/**
* Mark the component as having rendered to null.
* @param {string} id Component's `_rootNodeID`.
*/
function registerNullComponentID(id) {
nullComponentIDsRegistry[id] = true;
}
/**
* Unmark the component as having rendered to null: it renders to something now.
* @param {string} id Component's `_rootNodeID`.
*/
function deregisterNullComponentID(id) {
delete nullComponentIDsRegistry[id];
}
/**
* @param {string} id Component's `_rootNodeID`.
* @return {boolean} True if the component is rendered to null.
*/
function isNullComponentID(id) {
return !!nullComponentIDsRegistry[id];
}
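// Round-trip sketch of the registry above (the ID is illustrative):
//
//   registerNullComponentID('.0.1');
//   isNullComponentID('.0.1');    // true
//   deregisterNullComponentID('.0.1');
//   isNullComponentID('.0.1');    // false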
var ReactEmptyComponent = {
emptyElement: emptyElement,
injection: ReactEmptyComponentInjection,
isNullComponentID: isNullComponentID
};
module.exports = ReactEmptyComponent;
},{"./ReactElement":67,"./ReactInstanceMap":77,"./invariant":155}],70:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactErrorUtils
* @typechecks
*/
"use strict";
var ReactErrorUtils = {
/**
* Creates a guarded version of a function. This is supposed to make debugging
* of event handlers easier. To aid debugging with the browser's debugger,
* this currently simply returns the original function.
*
* @param {function} func Function to be executed
* @param {string} name The name of the guard
* @return {function}
*/
guard: function(func, name) {
return func;
}
};
module.exports = ReactErrorUtils;
},{}],71:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactEventEmitterMixin
*/
'use strict';
var EventPluginHub = require("./EventPluginHub");
function runEventQueueInBatch(events) {
EventPluginHub.enqueueEvents(events);
EventPluginHub.processEventQueue();
}
var ReactEventEmitterMixin = {
/**
* Streams a fired top-level event to `EventPluginHub` where plugins have the
* opportunity to create `ReactEvent`s to be dispatched.
*
* @param {string} topLevelType Record from `EventConstants`.
* @param {object} topLevelTarget The listening component root node.
* @param {string} topLevelTargetID ID of `topLevelTarget`.
* @param {object} nativeEvent Native environment event.
*/
handleTopLevel: function(
topLevelType,
topLevelTarget,
topLevelTargetID,
nativeEvent) {
var events = EventPluginHub.extractEvents(
topLevelType,
topLevelTarget,
topLevelTargetID,
nativeEvent
);
runEventQueueInBatch(events);
}
};
module.exports = ReactEventEmitterMixin;
},{"./EventPluginHub":22}],72:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactEventListener
* @typechecks static-only
*/
'use strict';
var EventListener = require("./EventListener");
var ExecutionEnvironment = require("./ExecutionEnvironment");
var PooledClass = require("./PooledClass");
var ReactInstanceHandles = require("./ReactInstanceHandles");
var ReactMount = require("./ReactMount");
var ReactUpdates = require("./ReactUpdates");
var assign = require("./Object.assign");
var getEventTarget = require("./getEventTarget");
var getUnboundedScrollPosition = require("./getUnboundedScrollPosition");
/**
* Finds the parent React component of `node`.
*
* @param {*} node
* @return {?DOMEventTarget} Parent container, or `null` if the specified node
* is not nested.
*/
function findParent(node) {
// TODO: It may be a good idea to cache this to prevent unnecessary DOM
// traversal, but caching is difficult to do correctly without using a
// mutation observer to listen for all DOM changes.
var nodeID = ReactMount.getID(node);
var rootID = ReactInstanceHandles.getReactRootIDFromNodeID(nodeID);
var container = ReactMount.findReactContainerForID(rootID);
var parent = ReactMount.getFirstReactDOM(container);
return parent;
}
// Used to store ancestor hierarchy in top level callback
function TopLevelCallbackBookKeeping(topLevelType, nativeEvent) {
this.topLevelType = topLevelType;
this.nativeEvent = nativeEvent;
this.ancestors = [];
}
assign(TopLevelCallbackBookKeeping.prototype, {
destructor: function() {
this.topLevelType = null;
this.nativeEvent = null;
this.ancestors.length = 0;
}
});
PooledClass.addPoolingTo(
TopLevelCallbackBookKeeping,
PooledClass.twoArgumentPooler
);
function handleTopLevelImpl(bookKeeping) {
var topLevelTarget = ReactMount.getFirstReactDOM(
getEventTarget(bookKeeping.nativeEvent)
) || window;
// Loop through the hierarchy, in case there's any nested components.
// It's important that we build the array of ancestors before calling any
// event handlers, because event handlers can modify the DOM, leading to
// inconsistencies with ReactMount's node cache. See #1105.
var ancestor = topLevelTarget;
while (ancestor) {
bookKeeping.ancestors.push(ancestor);
ancestor = findParent(ancestor);
}
for (var i = 0, l = bookKeeping.ancestors.length; i < l; i++) {
topLevelTarget = bookKeeping.ancestors[i];
var topLevelTargetID = ReactMount.getID(topLevelTarget) || '';
ReactEventListener._handleTopLevel(
bookKeeping.topLevelType,
topLevelTarget,
topLevelTargetID,
bookKeeping.nativeEvent
);
}
}
function scrollValueMonitor(cb) {
var scrollPosition = getUnboundedScrollPosition(window);
cb(scrollPosition);
}
var ReactEventListener = {
_enabled: true,
_handleTopLevel: null,
WINDOW_HANDLE: ExecutionEnvironment.canUseDOM ? window : null,
setHandleTopLevel: function(handleTopLevel) {
ReactEventListener._handleTopLevel = handleTopLevel;
},
setEnabled: function(enabled) {
ReactEventListener._enabled = !!enabled;
},
isEnabled: function() {
return ReactEventListener._enabled;
},
/**
* Traps top-level events by using event bubbling.
*
* @param {string} topLevelType Record from `EventConstants`.
* @param {string} handlerBaseName Event name (e.g. "click").
* @param {object} handle Element on which to attach listener.
* @return {object} An object with a remove function which will forcefully
* remove the listener.
* @internal
*/
trapBubbledEvent: function(topLevelType, handlerBaseName, handle) {
var element = handle;
if (!element) {
return null;
}
return EventListener.listen(
element,
handlerBaseName,
ReactEventListener.dispatchEvent.bind(null, topLevelType)
);
},
/**
* Traps a top-level event by using event capturing.
*
* @param {string} topLevelType Record from `EventConstants`.
* @param {string} handlerBaseName Event name (e.g. "click").
* @param {object} handle Element on which to attach listener.
* @return {object} An object with a remove function which will forcefully
* remove the listener.
* @internal
*/
trapCapturedEvent: function(topLevelType, handlerBaseName, handle) {
var element = handle;
if (!element) {
return null;
}
return EventListener.capture(
element,
handlerBaseName,
ReactEventListener.dispatchEvent.bind(null, topLevelType)
);
},
monitorScrollValue: function(refresh) {
var callback = scrollValueMonitor.bind(null, refresh);
EventListener.listen(window, 'scroll', callback);
},
dispatchEvent: function(topLevelType, nativeEvent) {
if (!ReactEventListener._enabled) {
return;
}
var bookKeeping = TopLevelCallbackBookKeeping.getPooled(
topLevelType,
nativeEvent
);
try {
// Event queue being processed in the same cycle allows
// `preventDefault`.
ReactUpdates.batchedUpdates(handleTopLevelImpl, bookKeeping);
} finally {
TopLevelCallbackBookKeeping.release(bookKeeping);
}
}
};
module.exports = ReactEventListener;
},{"./EventListener":21,"./ExecutionEnvironment":26,"./Object.assign":33,"./PooledClass":34,"./ReactInstanceHandles":76,"./ReactMount":81,"./ReactUpdates":104,"./getEventTarget":145,"./getUnboundedScrollPosition":151}],73:[function(require,module,exports){
/**
* Copyright 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactFragment
*/
'use strict';
var ReactElement = require("./ReactElement");
var warning = require("./warning");
/**
* We used to allow keyed objects to serve as a collection of ReactElements,
 * or nested sets. This allowed us a way to explicitly key a set or fragment of
* components. This is now being replaced with an opaque data structure.
* The upgrade path is to call React.addons.createFragment({ key: value }) to
* create a keyed fragment. The resulting data structure is opaque, for now.
*/
if ("production" !== "production") {
var fragmentKey = '_reactFragment';
var didWarnKey = '_reactDidWarn';
var canWarnForReactFragment = false;
try {
// Feature test. Don't even try to issue this warning if we can't use
// enumerable: false.
var dummy = function() {
return 1;
};
Object.defineProperty(
{},
fragmentKey,
{enumerable: false, value: true}
);
Object.defineProperty(
{},
'key',
{enumerable: true, get: dummy}
);
canWarnForReactFragment = true;
} catch (x) { }
var proxyPropertyAccessWithWarning = function(obj, key) {
Object.defineProperty(obj, key, {
enumerable: true,
get: function() {
("production" !== "production" ? warning(
this[didWarnKey],
'A ReactFragment is an opaque type. Accessing any of its ' +
'properties is deprecated. Pass it to one of the React.Children ' +
'helpers.'
) : null);
this[didWarnKey] = true;
return this[fragmentKey][key];
},
set: function(value) {
("production" !== "production" ? warning(
this[didWarnKey],
'A ReactFragment is an immutable opaque type. Mutating its ' +
'properties is deprecated.'
) : null);
this[didWarnKey] = true;
this[fragmentKey][key] = value;
}
});
};
var issuedWarnings = {};
var didWarnForFragment = function(fragment) {
// We use the keys and the type of the value as a heuristic to dedupe the
// warning to avoid spamming too much.
var fragmentCacheKey = '';
for (var key in fragment) {
fragmentCacheKey += key + ':' + (typeof fragment[key]) + ',';
}
var alreadyWarnedOnce = !!issuedWarnings[fragmentCacheKey];
issuedWarnings[fragmentCacheKey] = true;
return alreadyWarnedOnce;
};
}
var ReactFragment = {
// Wrap a keyed object in an opaque proxy that warns you if you access any
// of its properties.
create: function(object) {
if ("production" !== "production") {
if (typeof object !== 'object' || !object || Array.isArray(object)) {
("production" !== "production" ? warning(
false,
'React.addons.createFragment only accepts a single object.',
object
) : null);
return object;
}
if (ReactElement.isValidElement(object)) {
("production" !== "production" ? warning(
false,
'React.addons.createFragment does not accept a ReactElement ' +
'without a wrapper object.'
) : null);
return object;
}
if (canWarnForReactFragment) {
var proxy = {};
Object.defineProperty(proxy, fragmentKey, {
enumerable: false,
value: object
});
Object.defineProperty(proxy, didWarnKey, {
writable: true,
enumerable: false,
value: false
});
for (var key in object) {
proxyPropertyAccessWithWarning(proxy, key);
}
Object.preventExtensions(proxy);
return proxy;
}
}
return object;
},
// Extract the original keyed object from the fragment opaque type. Warn if
// a plain object is passed here.
extract: function(fragment) {
if ("production" !== "production") {
if (canWarnForReactFragment) {
if (!fragment[fragmentKey]) {
("production" !== "production" ? warning(
didWarnForFragment(fragment),
'Any use of a keyed object should be wrapped in ' +
'React.addons.createFragment(object) before being passed as a ' +
'child.'
) : null);
return fragment;
}
return fragment[fragmentKey];
}
}
return fragment;
},
// Check if this is a fragment and if so, extract the keyed object. If it
// is a fragment-like object, warn that it should be wrapped. Ignore if we
// can't determine what kind of object this is.
extractIfFragment: function(fragment) {
if ("production" !== "production") {
if (canWarnForReactFragment) {
// If it is the opaque type, return the keyed object.
if (fragment[fragmentKey]) {
return fragment[fragmentKey];
}
        // Otherwise, check each property to see if it has an element; if it
        // does, it is probably meant as a fragment, so we can warn early.
        // Defer the warning to extract.
for (var key in fragment) {
if (fragment.hasOwnProperty(key) &&
ReactElement.isValidElement(fragment[key])) {
// This looks like a fragment object, we should provide an
// early warning.
return ReactFragment.extract(fragment);
}
}
}
}
return fragment;
}
};
module.exports = ReactFragment;
},{"./ReactElement":67,"./warning":176}],74:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactInjection
*/
'use strict';
var DOMProperty = require("./DOMProperty");
var EventPluginHub = require("./EventPluginHub");
var ReactComponentEnvironment = require("./ReactComponentEnvironment");
var ReactClass = require("./ReactClass");
var ReactEmptyComponent = require("./ReactEmptyComponent");
var ReactBrowserEventEmitter = require("./ReactBrowserEventEmitter");
var ReactNativeComponent = require("./ReactNativeComponent");
var ReactDOMComponent = require("./ReactDOMComponent");
var ReactPerf = require("./ReactPerf");
var ReactRootIndex = require("./ReactRootIndex");
var ReactUpdates = require("./ReactUpdates");
var ReactInjection = {
Component: ReactComponentEnvironment.injection,
Class: ReactClass.injection,
DOMComponent: ReactDOMComponent.injection,
DOMProperty: DOMProperty.injection,
EmptyComponent: ReactEmptyComponent.injection,
EventPluginHub: EventPluginHub.injection,
EventEmitter: ReactBrowserEventEmitter.injection,
NativeComponent: ReactNativeComponent.injection,
Perf: ReactPerf.injection,
RootIndex: ReactRootIndex.injection,
Updates: ReactUpdates.injection
};
module.exports = ReactInjection;
},{"./DOMProperty":15,"./EventPluginHub":22,"./ReactBrowserEventEmitter":37,"./ReactClass":42,"./ReactComponentEnvironment":45,"./ReactDOMComponent":52,"./ReactEmptyComponent":69,"./ReactNativeComponent":84,"./ReactPerf":86,"./ReactRootIndex":95,"./ReactUpdates":104}],75:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactInputSelection
*/
'use strict';
var ReactDOMSelection = require("./ReactDOMSelection");
var containsNode = require("./containsNode");
var focusNode = require("./focusNode");
var getActiveElement = require("./getActiveElement");
function isInDocument(node) {
return containsNode(document.documentElement, node);
}
/**
 * ReactInputSelection: Input selection module for React. Based on
 * Selection.js, but modified to be suitable for React, with a couple of bug
 * fixes (doesn't assume buttons have range selections allowed).
 */
var ReactInputSelection = {
hasSelectionCapabilities: function(elem) {
return elem && (
((elem.nodeName === 'INPUT' && elem.type === 'text') ||
elem.nodeName === 'TEXTAREA' || elem.contentEditable === 'true')
);
},
getSelectionInformation: function() {
var focusedElem = getActiveElement();
return {
focusedElem: focusedElem,
selectionRange:
ReactInputSelection.hasSelectionCapabilities(focusedElem) ?
ReactInputSelection.getSelection(focusedElem) :
null
};
},
/**
* @restoreSelection: If any selection information was potentially lost,
* restore it. This is useful when performing operations that could remove dom
* nodes and place them back in, resulting in focus being lost.
*/
restoreSelection: function(priorSelectionInformation) {
var curFocusedElem = getActiveElement();
var priorFocusedElem = priorSelectionInformation.focusedElem;
var priorSelectionRange = priorSelectionInformation.selectionRange;
if (curFocusedElem !== priorFocusedElem &&
isInDocument(priorFocusedElem)) {
if (ReactInputSelection.hasSelectionCapabilities(priorFocusedElem)) {
ReactInputSelection.setSelection(
priorFocusedElem,
priorSelectionRange
);
}
focusNode(priorFocusedElem);
}
},
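  // Typical save/mutate/restore flow for the two methods above (sketch; the
  // DOM mutation in the middle is whatever reparenting the caller performs):
  //
  //   var saved = ReactInputSelection.getSelectionInformation();
  //   // ... move or replace DOM nodes, possibly blurring the input ...
  //   ReactInputSelection.restoreSelection(saved);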
/**
* @getSelection: Gets the selection bounds of a focused textarea, input or
* contentEditable node.
* -@input: Look up selection bounds of this input
* -@return {start: selectionStart, end: selectionEnd}
*/
getSelection: function(input) {
var selection;
if ('selectionStart' in input) {
// Modern browser with input or textarea.
selection = {
start: input.selectionStart,
end: input.selectionEnd
};
} else if (document.selection && input.nodeName === 'INPUT') {
// IE8 input.
var range = document.selection.createRange();
// There can only be one selection per document in IE, so it must
// be in our element.
if (range.parentElement() === input) {
selection = {
start: -range.moveStart('character', -input.value.length),
end: -range.moveEnd('character', -input.value.length)
};
}
} else {
// Content editable or old IE textarea.
selection = ReactDOMSelection.getOffsets(input);
}
return selection || {start: 0, end: 0};
},
/**
* @setSelection: Sets the selection bounds of a textarea or input and focuses
* the input.
* -@input Set selection bounds of this input or textarea
* -@offsets Object of same form that is returned from get*
*/
setSelection: function(input, offsets) {
var start = offsets.start;
var end = offsets.end;
if (typeof end === 'undefined') {
end = start;
}
if ('selectionStart' in input) {
input.selectionStart = start;
input.selectionEnd = Math.min(end, input.value.length);
} else if (document.selection && input.nodeName === 'INPUT') {
var range = input.createTextRange();
range.collapse(true);
range.moveStart('character', start);
range.moveEnd('character', end - start);
range.select();
} else {
ReactDOMSelection.setOffsets(input, offsets);
}
}
};
module.exports = ReactInputSelection;
},{"./ReactDOMSelection":60,"./containsNode":128,"./focusNode":139,"./getActiveElement":141}],76:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactInstanceHandles
* @typechecks static-only
*/
'use strict';
var ReactRootIndex = require("./ReactRootIndex");
var invariant = require("./invariant");
var SEPARATOR = '.';
var SEPARATOR_LENGTH = SEPARATOR.length;
/**
* Maximum depth of traversals before we consider the possibility of a bad ID.
*/
var MAX_TREE_DEPTH = 100;
/**
* Creates a DOM ID prefix to use when mounting React components.
*
* @param {number} index A unique integer
* @return {string} React root ID.
* @internal
*/
function getReactRootIDString(index) {
return SEPARATOR + index.toString(36);
}
/**
* Checks if a character in the supplied ID is a separator or the end.
*
* @param {string} id A React DOM ID.
* @param {number} index Index of the character to check.
* @return {boolean} True if the character is a separator or end of the ID.
* @private
*/
function isBoundary(id, index) {
return id.charAt(index) === SEPARATOR || index === id.length;
}
/**
* Checks if the supplied string is a valid React DOM ID.
*
* @param {string} id A React DOM ID, maybe.
* @return {boolean} True if the string is a valid React DOM ID.
* @private
*/
function isValidID(id) {
return id === '' || (
id.charAt(0) === SEPARATOR && id.charAt(id.length - 1) !== SEPARATOR
);
}
/**
* Checks if the first ID is an ancestor of or equal to the second ID.
*
* @param {string} ancestorID
* @param {string} descendantID
* @return {boolean} True if `ancestorID` is an ancestor of `descendantID`.
* @internal
*/
function isAncestorIDOf(ancestorID, descendantID) {
return (
descendantID.indexOf(ancestorID) === 0 &&
isBoundary(descendantID, ancestorID.length)
);
}
/**
* Gets the parent ID of the supplied React DOM ID, `id`.
*
* @param {string} id ID of a component.
* @return {string} ID of the parent, or an empty string.
* @private
*/
function getParentID(id) {
return id ? id.substr(0, id.lastIndexOf(SEPARATOR)) : '';
}
/**
* Gets the next DOM ID on the tree path from the supplied `ancestorID` to the
* supplied `destinationID`. If they are equal, the ID is returned.
*
* @param {string} ancestorID ID of an ancestor node of `destinationID`.
* @param {string} destinationID ID of the destination node.
* @return {string} Next ID on the path from `ancestorID` to `destinationID`.
* @private
*/
function getNextDescendantID(ancestorID, destinationID) {
("production" !== "production" ? invariant(
isValidID(ancestorID) && isValidID(destinationID),
'getNextDescendantID(%s, %s): Received an invalid React DOM ID.',
ancestorID,
destinationID
) : invariant(isValidID(ancestorID) && isValidID(destinationID)));
("production" !== "production" ? invariant(
isAncestorIDOf(ancestorID, destinationID),
'getNextDescendantID(...): React has made an invalid assumption about ' +
'the DOM hierarchy. Expected `%s` to be an ancestor of `%s`.',
ancestorID,
destinationID
) : invariant(isAncestorIDOf(ancestorID, destinationID)));
if (ancestorID === destinationID) {
return ancestorID;
}
// Skip over the ancestor and the immediate separator. Traverse until we hit
// another separator or we reach the end of `destinationID`.
var start = ancestorID.length + SEPARATOR_LENGTH;
var i;
for (i = start; i < destinationID.length; i++) {
if (isBoundary(destinationID, i)) {
break;
}
}
return destinationID.substr(0, i);
}
/**
* Gets the nearest common ancestor ID of two IDs.
*
 * Using this ID scheme, the nearest common ancestor ID is the longest common
 * prefix of the two IDs that immediately precedes a "marker" in both strings.
*
* @param {string} oneID
* @param {string} twoID
* @return {string} Nearest common ancestor ID, or the empty string if none.
* @private
*/
function getFirstCommonAncestorID(oneID, twoID) {
var minLength = Math.min(oneID.length, twoID.length);
if (minLength === 0) {
return '';
}
var lastCommonMarkerIndex = 0;
// Use `<=` to traverse until the "EOL" of the shorter string.
for (var i = 0; i <= minLength; i++) {
if (isBoundary(oneID, i) && isBoundary(twoID, i)) {
lastCommonMarkerIndex = i;
} else if (oneID.charAt(i) !== twoID.charAt(i)) {
break;
}
}
var longestCommonID = oneID.substr(0, lastCommonMarkerIndex);
("production" !== "production" ? invariant(
isValidID(longestCommonID),
'getFirstCommonAncestorID(%s, %s): Expected a valid React DOM ID: %s',
oneID,
twoID,
longestCommonID
) : invariant(isValidID(longestCommonID)));
return longestCommonID;
}
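// Worked examples for the two helpers above (IDs are illustrative):
//
//   getNextDescendantID('.0', '.0.1.2');          // '.0.1'
//   getFirstCommonAncestorID('.0.1.2', '.0.1.3'); // '.0.1'
//   getFirstCommonAncestorID('.0', '.1');         // ''  (no common ancestor)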
/**
* Traverses the parent path between two IDs (either up or down). The IDs must
* not be the same, and there must exist a parent path between them. If the
* callback returns `false`, traversal is stopped.
*
* @param {?string} start ID at which to start traversal.
* @param {?string} stop ID at which to end traversal.
 * @param {function} cb Callback to invoke each ID with.
 * @param {*} arg Argument to invoke the callback with.
 * @param {?boolean} skipFirst Whether or not to skip the first node.
 * @param {?boolean} skipLast Whether or not to skip the last node.
* @private
*/
function traverseParentPath(start, stop, cb, arg, skipFirst, skipLast) {
start = start || '';
stop = stop || '';
("production" !== "production" ? invariant(
start !== stop,
'traverseParentPath(...): Cannot traverse from and to the same ID, `%s`.',
start
) : invariant(start !== stop));
var traverseUp = isAncestorIDOf(stop, start);
("production" !== "production" ? invariant(
traverseUp || isAncestorIDOf(start, stop),
'traverseParentPath(%s, %s, ...): Cannot traverse from two IDs that do ' +
'not have a parent path.',
start,
stop
) : invariant(traverseUp || isAncestorIDOf(start, stop)));
// Traverse from `start` to `stop` one depth at a time.
var depth = 0;
var traverse = traverseUp ? getParentID : getNextDescendantID;
for (var id = start; /* until break */; id = traverse(id, stop)) {
var ret;
if ((!skipFirst || id !== start) && (!skipLast || id !== stop)) {
ret = cb(id, traverseUp, arg);
}
if (ret === false || id === stop) {
// Only break //after// visiting `stop`.
break;
}
("production" !== "production" ? invariant(
depth++ < MAX_TREE_DEPTH,
'traverseParentPath(%s, %s, ...): Detected an infinite loop while ' +
'traversing the React DOM ID tree. This may be due to malformed IDs: %s',
      start, stop, id
) : invariant(depth++ < MAX_TREE_DEPTH));
}
}
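// Worked example of the traversal above (IDs illustrative): this is the
// primitive behind the capture/bubble simulation further down. The second
// argument passed to `cb` is the `traverseUp` flag.
//
//   traverseParentPath('', '.0.1', cb, arg, true, false);
//   // cb('.0', false, arg), cb('.0.1', false, arg)   -- top-down
//   traverseParentPath('.0.1', '', cb, arg, false, true);
//   // cb('.0.1', true, arg), cb('.0', true, arg)     -- bottom-up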
/**
* Manages the IDs assigned to DOM representations of React components. This
* uses a specific scheme in order to traverse the DOM efficiently (e.g. in
* order to simulate events).
*
* @internal
*/
var ReactInstanceHandles = {
/**
* Constructs a React root ID
* @return {string} A React root ID.
*/
createReactRootID: function() {
return getReactRootIDString(ReactRootIndex.createReactRootIndex());
},
/**
* Constructs a React ID by joining a root ID with a name.
*
* @param {string} rootID Root ID of a parent component.
* @param {string} name A component's name (as flattened children).
* @return {string} A React ID.
* @internal
*/
createReactID: function(rootID, name) {
return rootID + name;
},
/**
* Gets the DOM ID of the React component that is the root of the tree that
* contains the React component with the supplied DOM ID.
*
* @param {string} id DOM ID of a React component.
* @return {?string} DOM ID of the React component that is the root.
* @internal
*/
getReactRootIDFromNodeID: function(id) {
if (id && id.charAt(0) === SEPARATOR && id.length > 1) {
var index = id.indexOf(SEPARATOR, 1);
return index > -1 ? id.substr(0, index) : id;
}
return null;
},
/**
   * Traverses the ID hierarchy and invokes the supplied `cb` on any IDs that
   * would receive a `mouseEnter` or `mouseLeave` event.
*
* NOTE: Does not invoke the callback on the nearest common ancestor because
* nothing "entered" or "left" that element.
*
* @param {string} leaveID ID being left.
* @param {string} enterID ID being entered.
* @param {function} cb Callback to invoke on each entered/left ID.
* @param {*} upArg Argument to invoke the callback with on left IDs.
* @param {*} downArg Argument to invoke the callback with on entered IDs.
* @internal
*/
traverseEnterLeave: function(leaveID, enterID, cb, upArg, downArg) {
var ancestorID = getFirstCommonAncestorID(leaveID, enterID);
if (ancestorID !== leaveID) {
traverseParentPath(leaveID, ancestorID, cb, upArg, false, true);
}
if (ancestorID !== enterID) {
traverseParentPath(ancestorID, enterID, cb, downArg, true, false);
}
},
/**
* Simulates the traversal of a two-phase, capture/bubble event dispatch.
*
* NOTE: This traversal happens on IDs without touching the DOM.
*
* @param {string} targetID ID of the target node.
* @param {function} cb Callback to invoke.
* @param {*} arg Argument to invoke the callback with.
* @internal
*/
traverseTwoPhase: function(targetID, cb, arg) {
if (targetID) {
traverseParentPath('', targetID, cb, arg, true, false);
traverseParentPath(targetID, '', cb, arg, false, true);
}
},
/**
* Traverse a node ID, calling the supplied `cb` for each ancestor ID. For
* example, passing `.0.$row-0.1` would result in `cb` getting called
* with `.0`, `.0.$row-0`, and `.0.$row-0.1`.
*
* NOTE: This traversal happens on IDs without touching the DOM.
*
* @param {string} targetID ID of the target node.
* @param {function} cb Callback to invoke.
* @param {*} arg Argument to invoke the callback with.
* @internal
*/
traverseAncestors: function(targetID, cb, arg) {
traverseParentPath('', targetID, cb, arg, true, false);
},
/**
* Exposed for unit testing.
* @private
*/
_getFirstCommonAncestorID: getFirstCommonAncestorID,
/**
* Exposed for unit testing.
* @private
*/
_getNextDescendantID: getNextDescendantID,
isAncestorIDOf: isAncestorIDOf,
SEPARATOR: SEPARATOR
};
module.exports = ReactInstanceHandles;
},{"./ReactRootIndex":95,"./invariant":155}],77:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactInstanceMap
*/
'use strict';
/**
* `ReactInstanceMap` maintains a mapping from a public facing stateful
* instance (key) and the internal representation (value). This allows public
* methods to accept the user facing instance as an argument and map them back
* to internal methods.
*/
// TODO: Replace this with ES6: var ReactInstanceMap = new Map();
var ReactInstanceMap = {
/**
* This API should be called `delete` but we'd have to make sure to always
* transform these to strings for IE support. When this transform is fully
* supported we can rename it.
*/
remove: function(key) {
key._reactInternalInstance = undefined;
},
get: function(key) {
return key._reactInternalInstance;
},
has: function(key) {
return key._reactInternalInstance !== undefined;
},
set: function(key, value) {
key._reactInternalInstance = value;
}
};
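// Minimal usage sketch (names hypothetical): the public instance itself is
// the key; the value is stashed on a private expando property.
//
//   var publicInstance = {};
//   ReactInstanceMap.set(publicInstance, {mounted: true});
//   ReactInstanceMap.has(publicInstance);  // true
//   ReactInstanceMap.get(publicInstance);  // {mounted: true}
//   ReactInstanceMap.remove(publicInstance);
//   ReactInstanceMap.has(publicInstance);  // false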
module.exports = ReactInstanceMap;
},{}],78:[function(require,module,exports){
/**
* Copyright 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactLifeCycle
*/
'use strict';
/**
* This module manages the bookkeeping when a component is in the process
* of being mounted or being unmounted. This is used as a way to enforce
* invariants (or warnings) when it is not recommended to call
* setState/forceUpdate.
*
* currentlyMountingInstance: During the construction phase, it is not possible
* to trigger an update since the instance is not fully mounted yet. However, we
* currently allow this as a convenience for mutating the initial state.
*
* currentlyUnmountingInstance: During the unmounting phase, the instance is
* still mounted and can therefore schedule an update. However, this is not
* recommended and probably an error since it's about to be unmounted.
 * Therefore we still want to trigger an error for that case.
*/
var ReactLifeCycle = {
currentlyMountingInstance: null,
currentlyUnmountingInstance: null
};
module.exports = ReactLifeCycle;
},{}],79:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactLink
* @typechecks static-only
*/
'use strict';
/**
* ReactLink encapsulates a common pattern in which a component wants to modify
* a prop received from its parent. ReactLink allows the parent to pass down a
* value coupled with a callback that, when invoked, expresses an intent to
* modify that value. For example:
*
* React.createClass({
* getInitialState: function() {
* return {value: ''};
* },
* render: function() {
* var valueLink = new ReactLink(this.state.value, this._handleValueChange);
* return <input valueLink={valueLink} />;
* },
 *     _handleValueChange: function(newValue) {
* this.setState({value: newValue});
* }
* });
*
* We have provided some sugary mixins to make the creation and
* consumption of ReactLink easier; see LinkedValueUtils and LinkedStateMixin.
*/
var React = require("./React");
/**
* @param {*} value current value of the link
* @param {function} requestChange callback to request a change
*/
function ReactLink(value, requestChange) {
this.value = value;
this.requestChange = requestChange;
}
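// Minimal manual usage (sketch; the handler body is hypothetical):
//
//   var link = new ReactLink('initial', function(newValue) {
//     // persist newValue, e.g. via setState in the owning component
//   });
//   link.value;                  // 'initial'
//   link.requestChange('next');  // expresses intent to change the value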
/**
* Creates a PropType that enforces the ReactLink API and optionally checks the
* type of the value being passed inside the link. Example:
*
* MyComponent.propTypes = {
* tabIndexLink: ReactLink.PropTypes.link(React.PropTypes.number)
* }
*/
function createLinkTypeChecker(linkType) {
var shapes = {
value: typeof linkType === 'undefined' ?
React.PropTypes.any.isRequired :
linkType.isRequired,
requestChange: React.PropTypes.func.isRequired
};
return React.PropTypes.shape(shapes);
}
ReactLink.PropTypes = {
link: createLinkTypeChecker
};
module.exports = ReactLink;
},{"./React":35}],80:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactMarkupChecksum
*/
'use strict';
var adler32 = require("./adler32");
var ReactMarkupChecksum = {
CHECKSUM_ATTR_NAME: 'data-react-checksum',
/**
* @param {string} markup Markup string
* @return {string} Markup string with checksum attribute attached
*/
addChecksumToMarkup: function(markup) {
var checksum = adler32(markup);
return markup.replace(
'>',
' ' + ReactMarkupChecksum.CHECKSUM_ATTR_NAME + '="' + checksum + '">'
);
},
/**
   * @param {string} markup Markup string to check
   * @param {DOMElement} element Root React element
   * @return {boolean} Whether or not the markup can be reused
*/
canReuseMarkup: function(markup, element) {
var existingChecksum = element.getAttribute(
ReactMarkupChecksum.CHECKSUM_ATTR_NAME
);
existingChecksum = existingChecksum && parseInt(existingChecksum, 10);
var markupChecksum = adler32(markup);
return markupChecksum === existingChecksum;
}
};
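// Sketch of the attribute round-trip (markup is illustrative; the checksum
// value is whatever adler32 computes). Note that `replace` targets the first
// '>' in the markup, i.e. the end of the root element's opening tag:
//
//   var markup = '<div data-reactid=".0">hi</div>';
//   ReactMarkupChecksum.addChecksumToMarkup(markup);
//   // '<div data-reactid=".0" data-react-checksum="<adler32>">hi</div>'
//   // canReuseMarkup(markup, rootEl) compares adler32(markup) against the
//   // attribute stored on the server-rendered root element.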
module.exports = ReactMarkupChecksum;
},{"./adler32":124}],81:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactMount
*/
'use strict';
var DOMProperty = require("./DOMProperty");
var ReactBrowserEventEmitter = require("./ReactBrowserEventEmitter");
var ReactCurrentOwner = require("./ReactCurrentOwner");
var ReactElement = require("./ReactElement");
var ReactElementValidator = require("./ReactElementValidator");
var ReactEmptyComponent = require("./ReactEmptyComponent");
var ReactInstanceHandles = require("./ReactInstanceHandles");
var ReactInstanceMap = require("./ReactInstanceMap");
var ReactMarkupChecksum = require("./ReactMarkupChecksum");
var ReactPerf = require("./ReactPerf");
var ReactReconciler = require("./ReactReconciler");
var ReactUpdateQueue = require("./ReactUpdateQueue");
var ReactUpdates = require("./ReactUpdates");
var emptyObject = require("./emptyObject");
var containsNode = require("./containsNode");
var getReactRootElementInContainer = require("./getReactRootElementInContainer");
var instantiateReactComponent = require("./instantiateReactComponent");
var invariant = require("./invariant");
var setInnerHTML = require("./setInnerHTML");
var shouldUpdateReactComponent = require("./shouldUpdateReactComponent");
var warning = require("./warning");
var SEPARATOR = ReactInstanceHandles.SEPARATOR;
var ATTR_NAME = DOMProperty.ID_ATTRIBUTE_NAME;
var nodeCache = {};
var ELEMENT_NODE_TYPE = 1;
var DOC_NODE_TYPE = 9;
/** Mapping from reactRootID to React component instance. */
var instancesByReactRootID = {};
/** Mapping from reactRootID to `container` nodes. */
var containersByReactRootID = {};
if ("production" !== "production") {
/** __DEV__-only mapping from reactRootID to root elements. */
var rootElementsByReactRootID = {};
}
// Used to store breadth-first search state in findComponentRoot.
var findComponentRootReusableArray = [];
/**
 * Finds the index of the first character that's not common between the two
 * given strings.
 *
 * @return {number} The index of the character where the strings diverge, or
 * -1 if the strings are identical.
*/
function firstDifferenceIndex(string1, string2) {
var minLen = Math.min(string1.length, string2.length);
for (var i = 0; i < minLen; i++) {
if (string1.charAt(i) !== string2.charAt(i)) {
return i;
}
}
return string1.length === string2.length ? -1 : minLen;
}
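// Worked examples for the helper above:
//
//   firstDifferenceIndex('.0.1', '.0.2');  // 3
//   firstDifferenceIndex('.0', '.0.1');    // 2  (one string is a prefix)
//   firstDifferenceIndex('.0', '.0');      // -1 (identical)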
/**
* @param {DOMElement} container DOM element that may contain a React component.
* @return {?string} A "reactRoot" ID, if a React component is rendered.
*/
function getReactRootID(container) {
var rootElement = getReactRootElementInContainer(container);
return rootElement && ReactMount.getID(rootElement);
}
/**
* Accessing node[ATTR_NAME] or calling getAttribute(ATTR_NAME) on a form
* element can return its control whose name or ID equals ATTR_NAME. All
* DOM nodes support `getAttributeNode` but this can also get called on
* other objects so just return '' if we're given something other than a
* DOM node (such as window).
*
* @param {?DOMElement|DOMWindow|DOMDocument|DOMTextNode} node DOM node.
* @return {string} ID of the supplied `domNode`.
*/
function getID(node) {
var id = internalGetID(node);
if (id) {
if (nodeCache.hasOwnProperty(id)) {
var cached = nodeCache[id];
if (cached !== node) {
("production" !== "production" ? invariant(
!isValid(cached, id),
'ReactMount: Two valid but unequal nodes with the same `%s`: %s',
ATTR_NAME, id
) : invariant(!isValid(cached, id)));
nodeCache[id] = node;
}
} else {
nodeCache[id] = node;
}
}
return id;
}
function internalGetID(node) {
// If node is something like a window, document, or text node, none of
// which support attributes or a .getAttribute method, gracefully return
// the empty string, as if the attribute were missing.
return node && node.getAttribute && node.getAttribute(ATTR_NAME) || '';
}
/**
* Sets the React-specific ID of the given node.
*
* @param {DOMElement} node The DOM node whose ID will be set.
* @param {string} id The value of the ID attribute.
*/
function setID(node, id) {
var oldID = internalGetID(node);
if (oldID !== id) {
delete nodeCache[oldID];
}
node.setAttribute(ATTR_NAME, id);
nodeCache[id] = node;
}
/**
* Finds the node with the supplied React-generated DOM ID.
*
* @param {string} id A React-generated DOM ID.
 * @return {DOMElement} DOM node with the supplied `id`.
* @internal
*/
function getNode(id) {
if (!nodeCache.hasOwnProperty(id) || !isValid(nodeCache[id], id)) {
nodeCache[id] = ReactMount.findReactNodeByID(id);
}
return nodeCache[id];
}
/**
* Finds the node with the supplied public React instance.
*
* @param {*} instance A public React instance.
 * @return {?DOMElement} DOM node with the supplied `id`.
* @internal
*/
function getNodeFromInstance(instance) {
var id = ReactInstanceMap.get(instance)._rootNodeID;
if (ReactEmptyComponent.isNullComponentID(id)) {
return null;
}
if (!nodeCache.hasOwnProperty(id) || !isValid(nodeCache[id], id)) {
nodeCache[id] = ReactMount.findReactNodeByID(id);
}
return nodeCache[id];
}
/**
* A node is "valid" if it is contained by a currently mounted container.
*
* This means that the node does not have to be contained by a document in
* order to be considered valid.
*
* @param {?DOMElement} node The candidate DOM node.
* @param {string} id The expected ID of the node.
* @return {boolean} Whether the node is contained by a mounted container.
*/
function isValid(node, id) {
if (node) {
("production" !== "production" ? invariant(
internalGetID(node) === id,
'ReactMount: Unexpected modification of `%s`',
ATTR_NAME
) : invariant(internalGetID(node) === id));
var container = ReactMount.findReactContainerForID(id);
if (container && containsNode(container, node)) {
return true;
}
}
return false;
}
/**
* Causes the cache to forget about one React-specific ID.
*
* @param {string} id The ID to forget.
*/
function purgeID(id) {
delete nodeCache[id];
}
var deepestNodeSoFar = null;
function findDeepestCachedAncestorImpl(ancestorID) {
var ancestor = nodeCache[ancestorID];
if (ancestor && isValid(ancestor, ancestorID)) {
deepestNodeSoFar = ancestor;
} else {
// This node isn't populated in the cache, so presumably none of its
// descendants are. Break out of the loop.
return false;
}
}
/**
* Return the deepest cached node whose ID is a prefix of `targetID`.
*/
function findDeepestCachedAncestor(targetID) {
deepestNodeSoFar = null;
ReactInstanceHandles.traverseAncestors(
targetID,
findDeepestCachedAncestorImpl
);
var foundNode = deepestNodeSoFar;
deepestNodeSoFar = null;
return foundNode;
}
/**
* Mounts this component and inserts it into the DOM.
*
* @param {ReactComponent} componentInstance The instance to mount.
* @param {string} rootID DOM ID of the root node.
* @param {DOMElement} container DOM element to mount into.
* @param {ReactReconcileTransaction} transaction
* @param {boolean} shouldReuseMarkup If true, do not insert markup
*/
function mountComponentIntoNode(
componentInstance,
rootID,
container,
transaction,
shouldReuseMarkup) {
var markup = ReactReconciler.mountComponent(
componentInstance, rootID, transaction, emptyObject
);
componentInstance._isTopLevel = true;
ReactMount._mountImageIntoNode(markup, container, shouldReuseMarkup);
}
/**
* Batched mount.
*
* @param {ReactComponent} componentInstance The instance to mount.
* @param {string} rootID DOM ID of the root node.
* @param {DOMElement} container DOM element to mount into.
* @param {boolean} shouldReuseMarkup If true, do not insert markup
*/
function batchedMountComponentIntoNode(
componentInstance,
rootID,
container,
shouldReuseMarkup) {
var transaction = ReactUpdates.ReactReconcileTransaction.getPooled();
transaction.perform(
mountComponentIntoNode,
null,
componentInstance,
rootID,
container,
transaction,
shouldReuseMarkup
);
ReactUpdates.ReactReconcileTransaction.release(transaction);
}
/**
* Mounting is the process of initializing a React component by creating its
* representative DOM elements and inserting them into a supplied `container`.
* Any prior content inside `container` is destroyed in the process.
*
* ReactMount.render(
* component,
* document.getElementById('container')
* );
*
* <div id="container"> <-- Supplied `container`.
* <div data-reactid=".3"> <-- Rendered reactRoot of React
* // ... component.
* </div>
* </div>
*
* Inside of `container`, the first element rendered is the "reactRoot".
*/
var ReactMount = {
/** Exposed for debugging purposes **/
_instancesByReactRootID: instancesByReactRootID,
/**
* This is a hook provided to support rendering React components while
* ensuring that the apparent scroll position of its `container` does not
* change.
*
* @param {DOMElement} container The `container` being rendered into.
* @param {function} renderCallback This must be called once to do the render.
*/
scrollMonitor: function(container, renderCallback) {
renderCallback();
},
/**
   * Take a component that's already mounted into the DOM and replace its props.
* @param {ReactComponent} prevComponent component instance already in the DOM
* @param {ReactElement} nextElement component instance to render
* @param {DOMElement} container container to render into
* @param {?function} callback function triggered on completion
*/
_updateRootComponent: function(
prevComponent,
nextElement,
container,
callback) {
if ("production" !== "production") {
ReactElementValidator.checkAndWarnForMutatedProps(nextElement);
}
ReactMount.scrollMonitor(container, function() {
ReactUpdateQueue.enqueueElementInternal(prevComponent, nextElement);
if (callback) {
ReactUpdateQueue.enqueueCallbackInternal(prevComponent, callback);
}
});
if ("production" !== "production") {
// Record the root element in case it later gets transplanted.
rootElementsByReactRootID[getReactRootID(container)] =
getReactRootElementInContainer(container);
}
return prevComponent;
},
/**
* Register a component into the instance map and starts scroll value
* monitoring
* @param {ReactComponent} nextComponent component instance to render
* @param {DOMElement} container container to render into
* @return {string} reactRoot ID prefix
*/
_registerComponent: function(nextComponent, container) {
("production" !== "production" ? invariant(
container && (
(container.nodeType === ELEMENT_NODE_TYPE || container.nodeType === DOC_NODE_TYPE)
),
'_registerComponent(...): Target container is not a DOM element.'
) : invariant(container && (
(container.nodeType === ELEMENT_NODE_TYPE || container.nodeType === DOC_NODE_TYPE)
)));
ReactBrowserEventEmitter.ensureScrollValueMonitoring();
var reactRootID = ReactMount.registerContainer(container);
instancesByReactRootID[reactRootID] = nextComponent;
return reactRootID;
},
/**
* Render a new component into the DOM.
* @param {ReactElement} nextElement element to render
* @param {DOMElement} container container to render into
* @param {boolean} shouldReuseMarkup if we should skip the markup insertion
* @return {ReactComponent} nextComponent
*/
_renderNewRootComponent: function(
nextElement,
container,
shouldReuseMarkup
) {
// Various parts of our code (such as ReactCompositeComponent's
// _renderValidatedComponent) assume that calls to render aren't nested;
// verify that that's the case.
("production" !== "production" ? warning(
ReactCurrentOwner.current == null,
'_renderNewRootComponent(): Render methods should be a pure function ' +
'of props and state; triggering nested component updates from ' +
'render is not allowed. If necessary, trigger nested updates in ' +
'componentDidUpdate.'
) : null);
var componentInstance = instantiateReactComponent(nextElement, null);
var reactRootID = ReactMount._registerComponent(
componentInstance,
container
);
// The initial render is synchronous but any updates that happen during
// rendering, in componentWillMount or componentDidMount, will be batched
// according to the current batching strategy.
ReactUpdates.batchedUpdates(
batchedMountComponentIntoNode,
componentInstance,
reactRootID,
container,
shouldReuseMarkup
);
if ("production" !== "production") {
// Record the root element in case it later gets transplanted.
rootElementsByReactRootID[reactRootID] =
getReactRootElementInContainer(container);
}
return componentInstance;
},
/**
* Renders a React component into the DOM in the supplied `container`.
*
* If the React component was previously rendered into `container`, this will
* perform an update on it and only mutate the DOM as necessary to reflect the
* latest React component.
*
* @param {ReactElement} nextElement Component element to render.
* @param {DOMElement} container DOM element to render into.
* @param {?function} callback function triggered on completion
* @return {ReactComponent} Component instance rendered in `container`.
*/
render: function(nextElement, container, callback) {
("production" !== "production" ? invariant(
ReactElement.isValidElement(nextElement),
'React.render(): Invalid component element.%s',
(
typeof nextElement === 'string' ?
' Instead of passing an element string, make sure to instantiate ' +
'it by passing it to React.createElement.' :
typeof nextElement === 'function' ?
' Instead of passing a component class, make sure to instantiate ' +
'it by passing it to React.createElement.' :
// Check if it quacks like an element
nextElement != null && nextElement.props !== undefined ?
' This may be caused by unintentionally loading two independent ' +
'copies of React.' :
''
)
) : invariant(ReactElement.isValidElement(nextElement)));
var prevComponent = instancesByReactRootID[getReactRootID(container)];
if (prevComponent) {
var prevElement = prevComponent._currentElement;
if (shouldUpdateReactComponent(prevElement, nextElement)) {
return ReactMount._updateRootComponent(
prevComponent,
nextElement,
container,
callback
).getPublicInstance();
} else {
ReactMount.unmountComponentAtNode(container);
}
}
var reactRootElement = getReactRootElementInContainer(container);
var containerHasReactMarkup =
reactRootElement && ReactMount.isRenderedByReact(reactRootElement);
if ("production" !== "production") {
if (!containerHasReactMarkup || reactRootElement.nextSibling) {
var rootElementSibling = reactRootElement;
while (rootElementSibling) {
if (ReactMount.isRenderedByReact(rootElementSibling)) {
("production" !== "production" ? warning(
false,
'render(): Target node has markup rendered by React, but there ' +
'are unrelated nodes as well. This is most commonly caused by ' +
'white-space inserted around server-rendered markup.'
) : null);
break;
}
rootElementSibling = rootElementSibling.nextSibling;
}
}
}
var shouldReuseMarkup = containerHasReactMarkup && !prevComponent;
var component = ReactMount._renderNewRootComponent(
nextElement,
container,
shouldReuseMarkup
).getPublicInstance();
if (callback) {
callback.call(component);
}
return component;
},
/**
* Constructs a component instance of `constructor` with `initialProps` and
* renders it into the supplied `container`.
*
* @param {function} constructor React component constructor.
* @param {?object} props Initial props of the component instance.
* @param {DOMElement} container DOM element to render into.
* @return {ReactComponent} Component instance rendered in `container`.
*/
constructAndRenderComponent: function(constructor, props, container) {
var element = ReactElement.createElement(constructor, props);
return ReactMount.render(element, container);
},
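  /*
   * Example usage (a sketch; `MyComponent` and the container id are
   * hypothetical):
   *
   *   ReactMount.constructAndRenderComponent(
   *     MyComponent,
   *     {title: 'Hello'},
   *     document.getElementById('container')
   *   );
   *
   * This is shorthand for rendering
   * ReactElement.createElement(MyComponent, {title: 'Hello'}) into the node.
   */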
/**
* Constructs a component instance of `constructor` with `initialProps` and
* renders it into a container node identified by supplied `id`.
*
* @param {function} componentConstructor React component constructor
* @param {?object} props Initial props of the component instance.
* @param {string} id ID of the DOM element to render into.
* @return {ReactComponent} Component instance rendered in the container node.
*/
constructAndRenderComponentByID: function(constructor, props, id) {
var domNode = document.getElementById(id);
("production" !== "production" ? invariant(
domNode,
'Tried to get element with id of "%s" but it is not present on the page.',
id
) : invariant(domNode));
return ReactMount.constructAndRenderComponent(constructor, props, domNode);
},
/**
* Registers a container node into which React components will be rendered.
* This also creates the "reactRoot" ID that will be assigned to the element
* rendered within.
*
* @param {DOMElement} container DOM element to register as a container.
* @return {string} The "reactRoot" ID of elements rendered within.
*/
registerContainer: function(container) {
var reactRootID = getReactRootID(container);
if (reactRootID) {
// If one exists, make sure it is a valid "reactRoot" ID.
reactRootID = ReactInstanceHandles.getReactRootIDFromNodeID(reactRootID);
}
if (!reactRootID) {
// No valid "reactRoot" ID found, create one.
reactRootID = ReactInstanceHandles.createReactRootID();
}
containersByReactRootID[reactRootID] = container;
return reactRootID;
},
/**
* Unmounts and destroys the React component rendered in the `container`.
*
* @param {DOMElement} container DOM element containing a React component.
* @return {boolean} True if a component was found in and unmounted from
* `container`
*/
unmountComponentAtNode: function(container) {
// Various parts of our code (such as ReactCompositeComponent's
// _renderValidatedComponent) assume that calls to render aren't nested;
// verify that that's the case. (Strictly speaking, unmounting won't cause a
// render but we still don't expect to be in a render call here.)
("production" !== "production" ? warning(
ReactCurrentOwner.current == null,
'unmountComponentAtNode(): Render methods should be a pure function of ' +
'props and state; triggering nested component updates from render is ' +
'not allowed. If necessary, trigger nested updates in ' +
'componentDidUpdate.'
) : null);
("production" !== "production" ? invariant(
container && (
(container.nodeType === ELEMENT_NODE_TYPE || container.nodeType === DOC_NODE_TYPE)
),
'unmountComponentAtNode(...): Target container is not a DOM element.'
) : invariant(container && (
(container.nodeType === ELEMENT_NODE_TYPE || container.nodeType === DOC_NODE_TYPE)
)));
var reactRootID = getReactRootID(container);
var component = instancesByReactRootID[reactRootID];
if (!component) {
return false;
}
ReactMount.unmountComponentFromNode(component, container);
delete instancesByReactRootID[reactRootID];
delete containersByReactRootID[reactRootID];
if ("production" !== "production") {
delete rootElementsByReactRootID[reactRootID];
}
return true;
},
/**
* Unmounts a component and removes it from the DOM.
*
* @param {ReactComponent} instance React component instance.
* @param {DOMElement} container DOM element to unmount from.
* @final
* @internal
* @see {ReactMount.unmountComponentAtNode}
*/
unmountComponentFromNode: function(instance, container) {
ReactReconciler.unmountComponent(instance);
if (container.nodeType === DOC_NODE_TYPE) {
container = container.documentElement;
}
// http://jsperf.com/emptying-a-node
while (container.lastChild) {
container.removeChild(container.lastChild);
}
},
/**
   * Finds the container DOM element that contains the React component to
   * which the supplied DOM `id` belongs.
*
* @param {string} id The ID of an element rendered by a React component.
* @return {?DOMElement} DOM element that contains the `id`.
*/
findReactContainerForID: function(id) {
var reactRootID = ReactInstanceHandles.getReactRootIDFromNodeID(id);
var container = containersByReactRootID[reactRootID];
if ("production" !== "production") {
var rootElement = rootElementsByReactRootID[reactRootID];
if (rootElement && rootElement.parentNode !== container) {
("production" !== "production" ? invariant(
// Call internalGetID here because getID calls isValid which calls
// findReactContainerForID (this function).
internalGetID(rootElement) === reactRootID,
'ReactMount: Root element ID differed from reactRootID.'
) : invariant(// Call internalGetID here because getID calls isValid which calls
// findReactContainerForID (this function).
internalGetID(rootElement) === reactRootID));
var containerChild = container.firstChild;
if (containerChild &&
reactRootID === internalGetID(containerChild)) {
// If the container has a new child with the same ID as the old
// root element, then rootElementsByReactRootID[reactRootID] is
// just stale and needs to be updated. The case that deserves a
// warning is when the container is empty.
rootElementsByReactRootID[reactRootID] = containerChild;
} else {
("production" !== "production" ? warning(
false,
'ReactMount: Root element has been removed from its original ' +
'container. New container:', rootElement.parentNode
) : null);
}
}
}
return container;
},
/**
* Finds an element rendered by React with the supplied ID.
*
* @param {string} id ID of a DOM node in the React component.
* @return {DOMElement} Root DOM node of the React component.
*/
findReactNodeByID: function(id) {
var reactRoot = ReactMount.findReactContainerForID(id);
return ReactMount.findComponentRoot(reactRoot, id);
},
/**
* True if the supplied `node` is rendered by React.
*
* @param {*} node DOM Element to check.
* @return {boolean} True if the DOM Element appears to be rendered by React.
* @internal
*/
isRenderedByReact: function(node) {
if (node.nodeType !== 1) {
// Not a DOMElement, therefore not a React component
return false;
}
var id = ReactMount.getID(node);
return id ? id.charAt(0) === SEPARATOR : false;
},
/**
* Traverses up the ancestors of the supplied node to find a node that is a
* DOM representation of a React component.
*
* @param {*} node
* @return {?DOMEventTarget}
* @internal
*/
getFirstReactDOM: function(node) {
var current = node;
while (current && current.parentNode !== current) {
if (ReactMount.isRenderedByReact(current)) {
return current;
}
current = current.parentNode;
}
return null;
},
/**
* Finds a node with the supplied `targetID` inside of the supplied
* `ancestorNode`. Exploits the ID naming scheme to perform the search
* quickly.
*
* @param {DOMEventTarget} ancestorNode Search from this root.
   * @param {string} targetID ID of the DOM representation of the component.
* @return {DOMEventTarget} DOM node with the supplied `targetID`.
* @internal
*/
findComponentRoot: function(ancestorNode, targetID) {
var firstChildren = findComponentRootReusableArray;
var childIndex = 0;
var deepestAncestor = findDeepestCachedAncestor(targetID) || ancestorNode;
firstChildren[0] = deepestAncestor.firstChild;
firstChildren.length = 1;
while (childIndex < firstChildren.length) {
var child = firstChildren[childIndex++];
var targetChild;
while (child) {
var childID = ReactMount.getID(child);
if (childID) {
// Even if we find the node we're looking for, we finish looping
// through its siblings to ensure they're cached so that we don't have
// to revisit this node again. Otherwise, we make n^2 calls to getID
// when visiting the many children of a single node in order.
if (targetID === childID) {
targetChild = child;
} else if (ReactInstanceHandles.isAncestorIDOf(childID, targetID)) {
// If we find a child whose ID is an ancestor of the given ID,
// then we can be sure that we only want to search the subtree
// rooted at this child, so we can throw out the rest of the
// search state.
firstChildren.length = childIndex = 0;
firstChildren.push(child.firstChild);
}
} else {
// If this child had no ID, then there's a chance that it was
// injected automatically by the browser, as when a `<table>`
// element sprouts an extra `<tbody>` child as a side effect of
// `.innerHTML` parsing. Optimistically continue down this
// branch, but not before examining the other siblings.
firstChildren.push(child.firstChild);
}
child = child.nextSibling;
}
if (targetChild) {
// Emptying firstChildren/findComponentRootReusableArray is
// not necessary for correctness, but it helps the GC reclaim
// any nodes that were left at the end of the search.
firstChildren.length = 0;
return targetChild;
}
}
firstChildren.length = 0;
("production" !== "production" ? invariant(
false,
'findComponentRoot(..., %s): Unable to find element. This probably ' +
'means the DOM was unexpectedly mutated (e.g., by the browser), ' +
'usually due to forgetting a <tbody> when using tables, nesting tags ' +
'like <form>, <p>, or <a>, or using non-SVG elements in an <svg> ' +
'parent. ' +
'Try inspecting the child nodes of the element with React ID `%s`.',
targetID,
ReactMount.getID(ancestorNode)
) : invariant(false));
},
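  /*
   * A worked sketch of the search above (hypothetical IDs): looking for
   * ".0.1.0", a sibling with ID ".0.2" is neither the target nor an
   * ancestor, so it is only visited (which caches its ID); a sibling with
   * ID ".0.1" is an ancestor of the target, so the remaining search state
   * is discarded and the scan restarts inside that child's subtree; a
   * sibling with no ID at all (e.g. a browser-injected <tbody>) has its
   * own children queued optimistically after the current siblings are
   * examined.
   */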
_mountImageIntoNode: function(markup, container, shouldReuseMarkup) {
("production" !== "production" ? invariant(
container && (
(container.nodeType === ELEMENT_NODE_TYPE || container.nodeType === DOC_NODE_TYPE)
),
'mountComponentIntoNode(...): Target container is not valid.'
) : invariant(container && (
(container.nodeType === ELEMENT_NODE_TYPE || container.nodeType === DOC_NODE_TYPE)
)));
if (shouldReuseMarkup) {
var rootElement = getReactRootElementInContainer(container);
if (ReactMarkupChecksum.canReuseMarkup(markup, rootElement)) {
return;
} else {
var checksum = rootElement.getAttribute(
ReactMarkupChecksum.CHECKSUM_ATTR_NAME
);
rootElement.removeAttribute(ReactMarkupChecksum.CHECKSUM_ATTR_NAME);
var rootMarkup = rootElement.outerHTML;
rootElement.setAttribute(
ReactMarkupChecksum.CHECKSUM_ATTR_NAME,
checksum
);
var diffIndex = firstDifferenceIndex(markup, rootMarkup);
var difference = ' (client) ' +
markup.substring(diffIndex - 20, diffIndex + 20) +
'\n (server) ' + rootMarkup.substring(diffIndex - 20, diffIndex + 20);
("production" !== "production" ? invariant(
container.nodeType !== DOC_NODE_TYPE,
'You\'re trying to render a component to the document using ' +
'server rendering but the checksum was invalid. This usually ' +
'means you rendered a different component type or props on ' +
'the client from the one on the server, or your render() ' +
'methods are impure. React cannot handle this case due to ' +
'cross-browser quirks by rendering at the document root. You ' +
'should look for environment dependent code in your components ' +
'and ensure the props are the same client and server side:\n%s',
difference
) : invariant(container.nodeType !== DOC_NODE_TYPE));
if ("production" !== "production") {
("production" !== "production" ? warning(
false,
'React attempted to reuse markup in a container but the ' +
'checksum was invalid. This generally means that you are ' +
'using server rendering and the markup generated on the ' +
'server was not what the client was expecting. React injected ' +
'new markup to compensate which works but you have lost many ' +
'of the benefits of server rendering. Instead, figure out ' +
'why the markup being generated is different on the client ' +
'or server:\n%s',
difference
) : null);
}
}
}
("production" !== "production" ? invariant(
container.nodeType !== DOC_NODE_TYPE,
'You\'re trying to render a component to the document but ' +
'you didn\'t use server rendering. We can\'t do this ' +
'without using server rendering due to cross-browser quirks. ' +
'See React.renderToString() for server rendering.'
) : invariant(container.nodeType !== DOC_NODE_TYPE));
setInnerHTML(container, markup);
},
/**
* React ID utilities.
*/
getReactRootID: getReactRootID,
getID: getID,
setID: setID,
getNode: getNode,
getNodeFromInstance: getNodeFromInstance,
purgeID: purgeID
};
ReactPerf.measureMethods(ReactMount, 'ReactMount', {
_renderNewRootComponent: '_renderNewRootComponent',
_mountImageIntoNode: '_mountImageIntoNode'
});
module.exports = ReactMount;
},{"./DOMProperty":15,"./ReactBrowserEventEmitter":37,"./ReactCurrentOwner":49,"./ReactElement":67,"./ReactElementValidator":68,"./ReactEmptyComponent":69,"./ReactInstanceHandles":76,"./ReactInstanceMap":77,"./ReactMarkupChecksum":80,"./ReactPerf":86,"./ReactReconciler":93,"./ReactUpdateQueue":103,"./ReactUpdates":104,"./containsNode":128,"./emptyObject":135,"./getReactRootElementInContainer":149,"./instantiateReactComponent":154,"./invariant":155,"./setInnerHTML":169,"./shouldUpdateReactComponent":172,"./warning":176}],82:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactMultiChild
* @typechecks static-only
*/
'use strict';
var ReactComponentEnvironment = require("./ReactComponentEnvironment");
var ReactMultiChildUpdateTypes = require("./ReactMultiChildUpdateTypes");
var ReactReconciler = require("./ReactReconciler");
var ReactChildReconciler = require("./ReactChildReconciler");
/**
* Updating children of a component may trigger recursive updates. The depth is
* used to batch recursive updates to render markup more efficiently.
*
* @type {number}
* @private
*/
var updateDepth = 0;
/**
* Queue of update configuration objects.
*
* Each object has a `type` property that is in `ReactMultiChildUpdateTypes`.
*
* @type {array<object>}
* @private
*/
var updateQueue = [];
/**
* Queue of markup to be rendered.
*
* @type {array<string>}
* @private
*/
var markupQueue = [];
/**
* Enqueues markup to be rendered and inserted at a supplied index.
*
* @param {string} parentID ID of the parent component.
* @param {string} markup Markup that renders into an element.
* @param {number} toIndex Destination index.
* @private
*/
function enqueueMarkup(parentID, markup, toIndex) {
// NOTE: Null values reduce hidden classes.
updateQueue.push({
parentID: parentID,
parentNode: null,
type: ReactMultiChildUpdateTypes.INSERT_MARKUP,
markupIndex: markupQueue.push(markup) - 1,
textContent: null,
fromIndex: null,
toIndex: toIndex
});
}
/**
* Enqueues moving an existing element to another index.
*
* @param {string} parentID ID of the parent component.
* @param {number} fromIndex Source index of the existing element.
* @param {number} toIndex Destination index of the element.
* @private
*/
function enqueueMove(parentID, fromIndex, toIndex) {
// NOTE: Null values reduce hidden classes.
updateQueue.push({
parentID: parentID,
parentNode: null,
type: ReactMultiChildUpdateTypes.MOVE_EXISTING,
markupIndex: null,
textContent: null,
fromIndex: fromIndex,
toIndex: toIndex
});
}
/**
* Enqueues removing an element at an index.
*
* @param {string} parentID ID of the parent component.
* @param {number} fromIndex Index of the element to remove.
* @private
*/
function enqueueRemove(parentID, fromIndex) {
// NOTE: Null values reduce hidden classes.
updateQueue.push({
parentID: parentID,
parentNode: null,
type: ReactMultiChildUpdateTypes.REMOVE_NODE,
markupIndex: null,
textContent: null,
fromIndex: fromIndex,
toIndex: null
});
}
/**
* Enqueues setting the text content.
*
* @param {string} parentID ID of the parent component.
* @param {string} textContent Text content to set.
* @private
*/
function enqueueTextContent(parentID, textContent) {
// NOTE: Null values reduce hidden classes.
updateQueue.push({
parentID: parentID,
parentNode: null,
type: ReactMultiChildUpdateTypes.TEXT_CONTENT,
markupIndex: null,
textContent: textContent,
fromIndex: null,
toIndex: null
});
}
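/*
 * For reference, every enqueue* helper above pushes a record of the same
 * shape (unused fields stay null to keep hidden classes stable). A
 * hypothetical INSERT_MARKUP record:
 *
 *   {
 *     parentID: '.0.1',
 *     parentNode: null,
 *     type: ReactMultiChildUpdateTypes.INSERT_MARKUP,
 *     markupIndex: 0,    // index of the markup string in markupQueue
 *     textContent: null,
 *     fromIndex: null,
 *     toIndex: 2
 *   }
 */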
/**
* Processes any enqueued updates.
*
* @private
*/
function processQueue() {
if (updateQueue.length) {
ReactComponentEnvironment.processChildrenUpdates(
updateQueue,
markupQueue
);
clearQueue();
}
}
/**
* Clears any enqueued updates.
*
* @private
*/
function clearQueue() {
updateQueue.length = 0;
markupQueue.length = 0;
}
/**
 * Components that mix in ReactMultiChild are capable of reconciling
 * multiple children.
*
* @class ReactMultiChild
* @internal
*/
var ReactMultiChild = {
/**
* Provides common functionality for components that must reconcile multiple
* children. This is used by `ReactDOMComponent` to mount, update, and
* unmount child components.
*
* @lends {ReactMultiChild.prototype}
*/
Mixin: {
/**
* Generates a "mount image" for each of the supplied children. In the case
* of `ReactDOMComponent`, a mount image is a string of markup.
*
* @param {?object} nestedChildren Nested child maps.
* @return {array} An array of mounted representations.
* @internal
*/
mountChildren: function(nestedChildren, transaction, context) {
var children = ReactChildReconciler.instantiateChildren(
nestedChildren, transaction, context
);
this._renderedChildren = children;
var mountImages = [];
var index = 0;
for (var name in children) {
if (children.hasOwnProperty(name)) {
var child = children[name];
// Inlined for performance, see `ReactInstanceHandles.createReactID`.
var rootID = this._rootNodeID + name;
var mountImage = ReactReconciler.mountComponent(
child,
rootID,
transaction,
context
);
child._mountIndex = index;
mountImages.push(mountImage);
index++;
}
}
return mountImages;
},
/**
* Replaces any rendered children with a text content string.
*
* @param {string} nextContent String of content.
* @internal
*/
updateTextContent: function(nextContent) {
updateDepth++;
var errorThrown = true;
try {
var prevChildren = this._renderedChildren;
// Remove any rendered children.
ReactChildReconciler.unmountChildren(prevChildren);
// TODO: The setTextContent operation should be enough
for (var name in prevChildren) {
if (prevChildren.hasOwnProperty(name)) {
this._unmountChildByName(prevChildren[name], name);
}
}
// Set new text content.
this.setTextContent(nextContent);
errorThrown = false;
} finally {
updateDepth--;
if (!updateDepth) {
if (errorThrown) {
clearQueue();
} else {
processQueue();
}
}
}
},
/**
* Updates the rendered children with new children.
*
* @param {?object} nextNestedChildren Nested child maps.
* @param {ReactReconcileTransaction} transaction
* @internal
*/
updateChildren: function(nextNestedChildren, transaction, context) {
updateDepth++;
var errorThrown = true;
try {
this._updateChildren(nextNestedChildren, transaction, context);
errorThrown = false;
} finally {
updateDepth--;
if (!updateDepth) {
if (errorThrown) {
clearQueue();
} else {
processQueue();
}
}
}
},
/**
* Improve performance by isolating this hot code path from the try/catch
* block in `updateChildren`.
*
* @param {?object} nextNestedChildren Nested child maps.
* @param {ReactReconcileTransaction} transaction
* @final
* @protected
*/
_updateChildren: function(nextNestedChildren, transaction, context) {
var prevChildren = this._renderedChildren;
var nextChildren = ReactChildReconciler.updateChildren(
prevChildren, nextNestedChildren, transaction, context
);
this._renderedChildren = nextChildren;
if (!nextChildren && !prevChildren) {
return;
}
var name;
// `nextIndex` will increment for each child in `nextChildren`, but
// `lastIndex` will be the last index visited in `prevChildren`.
var lastIndex = 0;
var nextIndex = 0;
for (name in nextChildren) {
if (!nextChildren.hasOwnProperty(name)) {
continue;
}
var prevChild = prevChildren && prevChildren[name];
var nextChild = nextChildren[name];
if (prevChild === nextChild) {
this.moveChild(prevChild, nextIndex, lastIndex);
lastIndex = Math.max(prevChild._mountIndex, lastIndex);
prevChild._mountIndex = nextIndex;
} else {
if (prevChild) {
// Update `lastIndex` before `_mountIndex` gets unset by unmounting.
lastIndex = Math.max(prevChild._mountIndex, lastIndex);
this._unmountChildByName(prevChild, name);
}
// The child must be instantiated before it's mounted.
this._mountChildByNameAtIndex(
nextChild, name, nextIndex, transaction, context
);
}
nextIndex++;
}
// Remove children that are no longer present.
for (name in prevChildren) {
if (prevChildren.hasOwnProperty(name) &&
!(nextChildren && nextChildren.hasOwnProperty(name))) {
this._unmountChildByName(prevChildren[name], name);
}
}
},
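    /*
     * A worked example of the diff above (hypothetical child names): with
     * prevChildren {a, b, c} mounted at indices 0, 1, 2 and nextChildren
     * in the order b, a, d:
     *   - b: same instance; _mountIndex 1 is not < lastIndex 0, so no move
     *     is enqueued; lastIndex becomes 1 and b._mountIndex becomes 0.
     *   - a: same instance; _mountIndex 0 < lastIndex 1, so a move to
     *     index 1 is enqueued.
     *   - d: no previous child, so it is mounted fresh at index 2.
     *   - c: missing from nextChildren, so the trailing loop unmounts it.
     */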
/**
* Unmounts all rendered children. This should be used to clean up children
* when this component is unmounted.
*
* @internal
*/
unmountChildren: function() {
var renderedChildren = this._renderedChildren;
ReactChildReconciler.unmountChildren(renderedChildren);
this._renderedChildren = null;
},
/**
* Moves a child component to the supplied index.
*
* @param {ReactComponent} child Component to move.
* @param {number} toIndex Destination index of the element.
* @param {number} lastIndex Last index visited of the siblings of `child`.
* @protected
*/
moveChild: function(child, toIndex, lastIndex) {
// If the index of `child` is less than `lastIndex`, then it needs to
// be moved. Otherwise, we do not need to move it because a child will be
// inserted or moved before `child`.
if (child._mountIndex < lastIndex) {
enqueueMove(this._rootNodeID, child._mountIndex, toIndex);
}
},
/**
* Creates a child component.
*
* @param {ReactComponent} child Component to create.
* @param {string} mountImage Markup to insert.
* @protected
*/
createChild: function(child, mountImage) {
enqueueMarkup(this._rootNodeID, mountImage, child._mountIndex);
},
/**
* Removes a child component.
*
* @param {ReactComponent} child Child to remove.
* @protected
*/
removeChild: function(child) {
enqueueRemove(this._rootNodeID, child._mountIndex);
},
/**
     * Sets this component's text content string.
*
* @param {string} textContent Text content to set.
* @protected
*/
setTextContent: function(textContent) {
enqueueTextContent(this._rootNodeID, textContent);
},
/**
* Mounts a child with the supplied name.
*
* NOTE: This is part of `updateChildren` and is here for readability.
*
* @param {ReactComponent} child Component to mount.
* @param {string} name Name of the child.
* @param {number} index Index at which to insert the child.
* @param {ReactReconcileTransaction} transaction
* @private
*/
_mountChildByNameAtIndex: function(
child,
name,
index,
transaction,
context) {
// Inlined for performance, see `ReactInstanceHandles.createReactID`.
var rootID = this._rootNodeID + name;
var mountImage = ReactReconciler.mountComponent(
child,
rootID,
transaction,
context
);
child._mountIndex = index;
this.createChild(child, mountImage);
},
/**
* Unmounts a rendered child by name.
*
* NOTE: This is part of `updateChildren` and is here for readability.
*
* @param {ReactComponent} child Component to unmount.
* @param {string} name Name of the child in `this._renderedChildren`.
* @private
*/
_unmountChildByName: function(child, name) {
this.removeChild(child);
child._mountIndex = null;
}
}
};
module.exports = ReactMultiChild;
},{"./ReactChildReconciler":40,"./ReactComponentEnvironment":45,"./ReactMultiChildUpdateTypes":83,"./ReactReconciler":93}],83:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactMultiChildUpdateTypes
*/
'use strict';
var keyMirror = require("./keyMirror");
/**
* When a component's children are updated, a series of update configuration
* objects are created in order to batch and serialize the required changes.
*
* Enumerates all the possible types of update configurations.
*
* @internal
*/
var ReactMultiChildUpdateTypes = keyMirror({
INSERT_MARKUP: null,
MOVE_EXISTING: null,
REMOVE_NODE: null,
TEXT_CONTENT: null
});
module.exports = ReactMultiChildUpdateTypes;
},{"./keyMirror":161}],84:[function(require,module,exports){
/**
* Copyright 2014-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactNativeComponent
*/
'use strict';
var assign = require("./Object.assign");
var invariant = require("./invariant");
var autoGenerateWrapperClass = null;
var genericComponentClass = null;
// This registry keeps track of wrapper classes around native tags
var tagToComponentClass = {};
var textComponentClass = null;
var ReactNativeComponentInjection = {
// This accepts a class that receives the tag string. This is a catch all
// that can render any kind of tag.
injectGenericComponentClass: function(componentClass) {
genericComponentClass = componentClass;
},
// This accepts a text component class that takes the text string to be
// rendered as props.
injectTextComponentClass: function(componentClass) {
textComponentClass = componentClass;
},
// This accepts a keyed object with classes as values. Each key represents a
// tag. That particular tag will use this class instead of the generic one.
injectComponentClasses: function(componentClasses) {
assign(tagToComponentClass, componentClasses);
},
// Temporary hack since we expect DOM refs to behave like composites,
// for this release.
injectAutoWrapper: function(wrapperFactory) {
autoGenerateWrapperClass = wrapperFactory;
}
};
/**
* Get a composite component wrapper class for a specific tag.
*
* @param {ReactElement} element The tag for which to get the class.
* @return {function} The React class constructor function.
*/
function getComponentClassForElement(element) {
if (typeof element.type === 'function') {
return element.type;
}
var tag = element.type;
var componentClass = tagToComponentClass[tag];
if (componentClass == null) {
tagToComponentClass[tag] = componentClass = autoGenerateWrapperClass(tag);
}
return componentClass;
}
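/*
 * Example of the lookup above (hypothetical element objects): for
 * {type: MyComponent, ...} where MyComponent is a function, MyComponent
 * itself is returned; for {type: 'div', ...} the wrapper class for 'div'
 * is returned, auto-generated via the injected wrapper factory and cached
 * in tagToComponentClass on first use.
 */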
/**
* Get a native internal component class for a specific tag.
*
* @param {ReactElement} element The element to create.
* @return {function} The internal class constructor function.
*/
function createInternalComponent(element) {
("production" !== "production" ? invariant(
genericComponentClass,
'There is no registered component for the tag %s',
element.type
) : invariant(genericComponentClass));
return new genericComponentClass(element.type, element.props);
}
/**
* @param {ReactText} text
* @return {ReactComponent}
*/
function createInstanceForText(text) {
return new textComponentClass(text);
}
/**
* @param {ReactComponent} component
* @return {boolean}
*/
function isTextComponent(component) {
return component instanceof textComponentClass;
}
var ReactNativeComponent = {
getComponentClassForElement: getComponentClassForElement,
createInternalComponent: createInternalComponent,
createInstanceForText: createInstanceForText,
isTextComponent: isTextComponent,
injection: ReactNativeComponentInjection
};
module.exports = ReactNativeComponent;
},{"./Object.assign":33,"./invariant":155}],85:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactOwner
*/
'use strict';
var invariant = require("./invariant");
/**
* ReactOwners are capable of storing references to owned components.
*
* All components are capable of //being// referenced by owner components, but
* only ReactOwner components are capable of //referencing// owned components.
* The named reference is known as a "ref".
*
* Refs are available when mounted and updated during reconciliation.
*
* var MyComponent = React.createClass({
* render: function() {
* return (
* <div onClick={this.handleClick}>
* <CustomComponent ref="custom" />
* </div>
* );
* },
* handleClick: function() {
* this.refs.custom.handleClick();
* },
* componentDidMount: function() {
* this.refs.custom.initialize();
* }
* });
*
* Refs should rarely be used. When refs are used, they should only be done to
* control data that is not handled by React's data flow.
*
* @class ReactOwner
*/
var ReactOwner = {
/**
* @param {?object} object
* @return {boolean} True if `object` is a valid owner.
* @final
*/
isValidOwner: function(object) {
return !!(
(object &&
typeof object.attachRef === 'function' && typeof object.detachRef === 'function')
);
},
/**
* Adds a component by ref to an owner component.
*
* @param {ReactComponent} component Component to reference.
* @param {string} ref Name by which to refer to the component.
* @param {ReactOwner} owner Component on which to record the ref.
* @final
* @internal
*/
addComponentAsRefTo: function(component, ref, owner) {
("production" !== "production" ? invariant(
ReactOwner.isValidOwner(owner),
'addComponentAsRefTo(...): Only a ReactOwner can have refs. This ' +
'usually means that you\'re trying to add a ref to a component that ' +
'doesn\'t have an owner (that is, was not created inside of another ' +
'component\'s `render` method). Try rendering this component inside of ' +
'a new top-level component which will hold the ref.'
) : invariant(ReactOwner.isValidOwner(owner)));
owner.attachRef(ref, component);
},
/**
* Removes a component by ref from an owner component.
*
* @param {ReactComponent} component Component to dereference.
* @param {string} ref Name of the ref to remove.
* @param {ReactOwner} owner Component on which the ref is recorded.
* @final
* @internal
*/
removeComponentAsRefFrom: function(component, ref, owner) {
("production" !== "production" ? invariant(
ReactOwner.isValidOwner(owner),
'removeComponentAsRefFrom(...): Only a ReactOwner can have refs. This ' +
'usually means that you\'re trying to remove a ref to a component that ' +
'doesn\'t have an owner (that is, was not created inside of another ' +
'component\'s `render` method). Try rendering this component inside of ' +
'a new top-level component which will hold the ref.'
) : invariant(ReactOwner.isValidOwner(owner)));
// Check that `component` is still the current ref because we do not want to
// detach the ref if another component stole it.
if (owner.getPublicInstance().refs[ref] === component.getPublicInstance()) {
owner.detachRef(ref);
}
}
};
module.exports = ReactOwner;
},{"./invariant":155}],86:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactPerf
* @typechecks static-only
*/
'use strict';
/**
* ReactPerf is a general AOP system designed to measure performance. This
* module only has the hooks: see ReactDefaultPerf for the analysis tool.
*/
var ReactPerf = {
/**
* Boolean to enable/disable measurement. Set to false by default to prevent
* accidental logging and perf loss.
*/
enableMeasure: false,
/**
* Holds onto the measure function in use. By default, don't measure
* anything, but we'll override this if we inject a measure function.
*/
storedMeasure: _noMeasure,
/**
* @param {object} object
* @param {string} objectName
* @param {object<string>} methodNames
*/
measureMethods: function(object, objectName, methodNames) {
if ("production" !== "production") {
for (var key in methodNames) {
if (!methodNames.hasOwnProperty(key)) {
continue;
}
object[key] = ReactPerf.measure(
objectName,
methodNames[key],
object[key]
);
}
}
},
/**
* Use this to wrap methods you want to measure. Zero overhead in production.
*
* @param {string} objName
* @param {string} fnName
* @param {function} func
* @return {function}
*/
measure: function(objName, fnName, func) {
if ("production" !== "production") {
var measuredFunc = null;
var wrapper = function() {
if (ReactPerf.enableMeasure) {
if (!measuredFunc) {
measuredFunc = ReactPerf.storedMeasure(objName, fnName, func);
}
return measuredFunc.apply(this, arguments);
}
return func.apply(this, arguments);
};
wrapper.displayName = objName + '_' + fnName;
return wrapper;
}
return func;
},
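  /*
   * Example usage (a sketch; `MyModule` and `doWork` are hypothetical):
   *
   *   MyModule.doWork = ReactPerf.measure('MyModule', 'doWork', MyModule.doWork);
   *
   * In a development build the wrapper defers to the injected measure
   * function once `enableMeasure` is set; in this production-transformed
   * bundle the development branch above is dead code, so `measure` returns
   * `func` unchanged.
   */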
injection: {
/**
* @param {function} measure
*/
injectMeasure: function(measure) {
ReactPerf.storedMeasure = measure;
}
}
};
/**
* Simply passes through the measured function, without measuring it.
*
* @param {string} objName
* @param {string} fnName
* @param {function} func
* @return {function}
*/
function _noMeasure(objName, fnName, func) {
return func;
}
module.exports = ReactPerf;
},{}],87:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactPropTransferer
*/
'use strict';
var assign = require("./Object.assign");
var emptyFunction = require("./emptyFunction");
var joinClasses = require("./joinClasses");
/**
* Creates a transfer strategy that will merge prop values using the supplied
* `mergeStrategy`. If a prop was previously unset, this just sets it.
*
* @param {function} mergeStrategy
* @return {function}
*/
function createTransferStrategy(mergeStrategy) {
return function(props, key, value) {
if (!props.hasOwnProperty(key)) {
props[key] = value;
} else {
props[key] = mergeStrategy(props[key], value);
}
};
}
var transferStrategyMerge = createTransferStrategy(function(a, b) {
// `merge` overrides the first object's (`props[key]` above) keys using the
// second object's (`value`) keys. An object's style's existing `propA` would
// get overridden. Flip the order here.
return assign({}, b, a);
});
/**
* Transfer strategies dictate how props are transferred by `transferPropsTo`.
* NOTE: if you add any more exceptions to this list you should be sure to
* update `cloneWithProps()` accordingly.
*/
var TransferStrategies = {
/**
* Never transfer `children`.
*/
children: emptyFunction,
/**
* Transfer the `className` prop by merging them.
*/
className: createTransferStrategy(joinClasses),
/**
* Transfer the `style` prop (which is an object) by merging them.
*/
style: transferStrategyMerge
};
/**
* Mutates the first argument by transferring the properties from the second
* argument.
*
* @param {object} props
* @param {object} newProps
* @return {object}
*/
function transferInto(props, newProps) {
for (var thisKey in newProps) {
if (!newProps.hasOwnProperty(thisKey)) {
continue;
}
var transferStrategy = TransferStrategies[thisKey];
if (transferStrategy && TransferStrategies.hasOwnProperty(thisKey)) {
transferStrategy(props, thisKey, newProps[thisKey]);
} else if (!props.hasOwnProperty(thisKey)) {
props[thisKey] = newProps[thisKey];
}
}
return props;
}
/**
 * ReactPropTransferer is capable of transferring props to another component
 * using a `transferPropsTo` method.
*
* @class ReactPropTransferer
*/
var ReactPropTransferer = {
/**
* Merge two props objects using TransferStrategies.
*
* @param {object} oldProps original props (they take precedence)
* @param {object} newProps new props to merge in
* @return {object} a new object containing both sets of props merged.
*/
mergeProps: function(oldProps, newProps) {
return transferInto(assign({}, oldProps), newProps);
}
};
module.exports = ReactPropTransferer;
},{"./Object.assign":33,"./emptyFunction":134,"./joinClasses":160}],88:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactPropTypeLocationNames
*/
'use strict';
var ReactPropTypeLocationNames = {};
if ("production" !== "production") {
ReactPropTypeLocationNames = {
prop: 'prop',
context: 'context',
childContext: 'child context'
};
}
module.exports = ReactPropTypeLocationNames;
},{}],89:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactPropTypeLocations
*/
'use strict';
var keyMirror = require("./keyMirror");
var ReactPropTypeLocations = keyMirror({
prop: null,
context: null,
childContext: null
});
module.exports = ReactPropTypeLocations;
},{"./keyMirror":161}],90:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactPropTypes
*/
'use strict';
var ReactElement = require("./ReactElement");
var ReactFragment = require("./ReactFragment");
var ReactPropTypeLocationNames = require("./ReactPropTypeLocationNames");
var emptyFunction = require("./emptyFunction");
/**
* Collection of methods that allow declaration and validation of props that are
* supplied to React components. Example usage:
*
* var Props = require('ReactPropTypes');
* var MyArticle = React.createClass({
* propTypes: {
* // An optional string prop named "description".
* description: Props.string,
*
* // A required enum prop named "category".
* category: Props.oneOf(['News','Photos']).isRequired,
*
* // A prop named "dialog" that requires an instance of Dialog.
* dialog: Props.instanceOf(Dialog).isRequired
* },
* render: function() { ... }
* });
*
* A more formal specification of how these methods are used:
*
* type := array|bool|func|object|number|string|oneOf([...])|instanceOf(...)
* decl := ReactPropTypes.{type}(.isRequired)?
*
* Each and every declaration produces a function with the same signature. This
* allows the creation of custom validation functions. For example:
*
* var MyLink = React.createClass({
* propTypes: {
* // An optional string or URI prop named "href".
* href: function(props, propName, componentName) {
* var propValue = props[propName];
* if (propValue != null && typeof propValue !== 'string' &&
* !(propValue instanceof URI)) {
* return new Error(
* 'Expected a string or an URI for ' + propName + ' in ' +
* componentName
* );
* }
* }
* },
* render: function() {...}
* });
*
* @internal
*/
var ANONYMOUS = '<<anonymous>>';
var elementTypeChecker = createElementTypeChecker();
var nodeTypeChecker = createNodeChecker();
var ReactPropTypes = {
array: createPrimitiveTypeChecker('array'),
bool: createPrimitiveTypeChecker('boolean'),
func: createPrimitiveTypeChecker('function'),
number: createPrimitiveTypeChecker('number'),
object: createPrimitiveTypeChecker('object'),
string: createPrimitiveTypeChecker('string'),
any: createAnyTypeChecker(),
arrayOf: createArrayOfTypeChecker,
element: elementTypeChecker,
instanceOf: createInstanceTypeChecker,
node: nodeTypeChecker,
objectOf: createObjectOfTypeChecker,
oneOf: createEnumTypeChecker,
oneOfType: createUnionTypeChecker,
shape: createShapeTypeChecker
};
function createChainableTypeChecker(validate) {
function checkType(isRequired, props, propName, componentName, location) {
componentName = componentName || ANONYMOUS;
if (props[propName] == null) {
var locationName = ReactPropTypeLocationNames[location];
if (isRequired) {
return new Error(
("Required " + locationName + " `" + propName + "` was not specified in ") +
("`" + componentName + "`.")
);
}
return null;
} else {
return validate(props, propName, componentName, location);
}
}
var chainedCheckType = checkType.bind(null, false);
chainedCheckType.isRequired = checkType.bind(null, true);
return chainedCheckType;
}
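/*
 * Sketch of the chaining above: every checker built from
 * createChainableTypeChecker passes when the prop is null or undefined
 * unless `.isRequired` is used. With hypothetical propTypes:
 *
 *   propTypes: {
 *     title: ReactPropTypes.string,            // optional
 *     count: ReactPropTypes.number.isRequired  // error if missing
 *   }
 */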
function createPrimitiveTypeChecker(expectedType) {
function validate(props, propName, componentName, location) {
var propValue = props[propName];
var propType = getPropType(propValue);
if (propType !== expectedType) {
var locationName = ReactPropTypeLocationNames[location];
// `propValue` being instance of, say, date/regexp, pass the 'object'
// check, but we can offer a more precise error message here rather than
// 'of type `object`'.
var preciseType = getPreciseType(propValue);
return new Error(
("Invalid " + locationName + " `" + propName + "` of type `" + preciseType + "` ") +
("supplied to `" + componentName + "`, expected `" + expectedType + "`.")
);
}
return null;
}
return createChainableTypeChecker(validate);
}
function createAnyTypeChecker() {
return createChainableTypeChecker(emptyFunction.thatReturns(null));
}
function createArrayOfTypeChecker(typeChecker) {
function validate(props, propName, componentName, location) {
var propValue = props[propName];
if (!Array.isArray(propValue)) {
var locationName = ReactPropTypeLocationNames[location];
var propType = getPropType(propValue);
return new Error(
("Invalid " + locationName + " `" + propName + "` of type ") +
("`" + propType + "` supplied to `" + componentName + "`, expected an array.")
);
}
for (var i = 0; i < propValue.length; i++) {
var error = typeChecker(propValue, i, componentName, location);
if (error instanceof Error) {
return error;
}
}
return null;
}
return createChainableTypeChecker(validate);
}
function createElementTypeChecker() {
function validate(props, propName, componentName, location) {
if (!ReactElement.isValidElement(props[propName])) {
var locationName = ReactPropTypeLocationNames[location];
return new Error(
("Invalid " + locationName + " `" + propName + "` supplied to ") +
("`" + componentName + "`, expected a ReactElement.")
);
}
return null;
}
return createChainableTypeChecker(validate);
}
function createInstanceTypeChecker(expectedClass) {
function validate(props, propName, componentName, location) {
if (!(props[propName] instanceof expectedClass)) {
var locationName = ReactPropTypeLocationNames[location];
var expectedClassName = expectedClass.name || ANONYMOUS;
return new Error(
("Invalid " + locationName + " `" + propName + "` supplied to ") +
("`" + componentName + "`, expected instance of `" + expectedClassName + "`.")
);
}
return null;
}
return createChainableTypeChecker(validate);
}
function createEnumTypeChecker(expectedValues) {
function validate(props, propName, componentName, location) {
var propValue = props[propName];
for (var i = 0; i < expectedValues.length; i++) {
if (propValue === expectedValues[i]) {
return null;
}
}
var locationName = ReactPropTypeLocationNames[location];
var valuesString = JSON.stringify(expectedValues);
return new Error(
("Invalid " + locationName + " `" + propName + "` of value `" + propValue + "` ") +
("supplied to `" + componentName + "`, expected one of " + valuesString + ".")
);
}
return createChainableTypeChecker(validate);
}
function createObjectOfTypeChecker(typeChecker) {
function validate(props, propName, componentName, location) {
var propValue = props[propName];
var propType = getPropType(propValue);
if (propType !== 'object') {
var locationName = ReactPropTypeLocationNames[location];
return new Error(
("Invalid " + locationName + " `" + propName + "` of type ") +
("`" + propType + "` supplied to `" + componentName + "`, expected an object.")
);
}
for (var key in propValue) {
if (propValue.hasOwnProperty(key)) {
var error = typeChecker(propValue, key, componentName, location);
if (error instanceof Error) {
return error;
}
}
}
return null;
}
return createChainableTypeChecker(validate);
}
function createUnionTypeChecker(arrayOfTypeCheckers) {
function validate(props, propName, componentName, location) {
for (var i = 0; i < arrayOfTypeCheckers.length; i++) {
var checker = arrayOfTypeCheckers[i];
if (checker(props, propName, componentName, location) == null) {
return null;
}
}
var locationName = ReactPropTypeLocationNames[location];
return new Error(
("Invalid " + locationName + " `" + propName + "` supplied to ") +
("`" + componentName + "`.")
);
}
return createChainableTypeChecker(validate);
}
function createNodeChecker() {
function validate(props, propName, componentName, location) {
if (!isNode(props[propName])) {
var locationName = ReactPropTypeLocationNames[location];
return new Error(
("Invalid " + locationName + " `" + propName + "` supplied to ") +
("`" + componentName + "`, expected a ReactNode.")
);
}
return null;
}
return createChainableTypeChecker(validate);
}
function createShapeTypeChecker(shapeTypes) {
function validate(props, propName, componentName, location) {
var propValue = props[propName];
var propType = getPropType(propValue);
if (propType !== 'object') {
var locationName = ReactPropTypeLocationNames[location];
return new Error(
("Invalid " + locationName + " `" + propName + "` of type `" + propType + "` ") +
("supplied to `" + componentName + "`, expected `object`.")
);
}
for (var key in shapeTypes) {
var checker = shapeTypes[key];
if (!checker) {
continue;
}
var error = checker(propValue, key, componentName, location);
if (error) {
return error;
}
}
return null;
}
return createChainableTypeChecker(validate);
}
function isNode(propValue) {
switch (typeof propValue) {
case 'number':
case 'string':
case 'undefined':
return true;
case 'boolean':
return !propValue;
case 'object':
if (Array.isArray(propValue)) {
return propValue.every(isNode);
}
if (propValue === null || ReactElement.isValidElement(propValue)) {
return true;
}
propValue = ReactFragment.extractIfFragment(propValue);
for (var k in propValue) {
if (!isNode(propValue[k])) {
return false;
}
}
return true;
default:
return false;
}
}
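/*
 * Examples for the check above: 'text', 42, undefined, null, false, a
 * valid ReactElement, and arrays or fragments made up of these all count
 * as nodes; true, a function, or an array containing a function does not.
 */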
// Equivalent of `typeof` but with special handling for array and regexp.
function getPropType(propValue) {
var propType = typeof propValue;
if (Array.isArray(propValue)) {
return 'array';
}
if (propValue instanceof RegExp) {
// Old webkits (at least until Android 4.0) return 'function' rather than
// 'object' for typeof a RegExp. We'll normalize this here so that /bla/
// passes PropTypes.object.
return 'object';
}
return propType;
}
// This handles more types than `getPropType`. Only used for error messages.
// See `createPrimitiveTypeChecker`.
function getPreciseType(propValue) {
var propType = getPropType(propValue);
if (propType === 'object') {
if (propValue instanceof Date) {
return 'date';
} else if (propValue instanceof RegExp) {
return 'regexp';
}
}
return propType;
}
module.exports = ReactPropTypes;
},{"./ReactElement":67,"./ReactFragment":73,"./ReactPropTypeLocationNames":88,"./emptyFunction":134}],91:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactPutListenerQueue
*/
'use strict';
var PooledClass = require("./PooledClass");
var ReactBrowserEventEmitter = require("./ReactBrowserEventEmitter");
var assign = require("./Object.assign");
function ReactPutListenerQueue() {
this.listenersToPut = [];
}
assign(ReactPutListenerQueue.prototype, {
enqueuePutListener: function(rootNodeID, propKey, propValue) {
this.listenersToPut.push({
rootNodeID: rootNodeID,
propKey: propKey,
propValue: propValue
});
},
putListeners: function() {
for (var i = 0; i < this.listenersToPut.length; i++) {
var listenerToPut = this.listenersToPut[i];
ReactBrowserEventEmitter.putListener(
listenerToPut.rootNodeID,
listenerToPut.propKey,
listenerToPut.propValue
);
}
},
reset: function() {
this.listenersToPut.length = 0;
},
destructor: function() {
this.reset();
}
});
PooledClass.addPoolingTo(ReactPutListenerQueue);
module.exports = ReactPutListenerQueue;
},{"./Object.assign":33,"./PooledClass":34,"./ReactBrowserEventEmitter":37}],92:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactReconcileTransaction
* @typechecks static-only
*/
'use strict';
var CallbackQueue = require("./CallbackQueue");
var PooledClass = require("./PooledClass");
var ReactBrowserEventEmitter = require("./ReactBrowserEventEmitter");
var ReactInputSelection = require("./ReactInputSelection");
var ReactPutListenerQueue = require("./ReactPutListenerQueue");
var Transaction = require("./Transaction");
var assign = require("./Object.assign");
/**
* Ensures that, when possible, the selection range (currently selected text
* input) is not disturbed by performing the transaction.
*/
var SELECTION_RESTORATION = {
/**
* @return {Selection} Selection information.
*/
initialize: ReactInputSelection.getSelectionInformation,
/**
* @param {Selection} sel Selection information returned from `initialize`.
*/
close: ReactInputSelection.restoreSelection
};
/**
* Suppresses events (blur/focus) that could be inadvertently dispatched due to
* high level DOM manipulations (like temporarily removing a text input from the
* DOM).
*/
var EVENT_SUPPRESSION = {
/**
* @return {boolean} The enabled status of `ReactBrowserEventEmitter` before
* the reconciliation.
*/
initialize: function() {
var currentlyEnabled = ReactBrowserEventEmitter.isEnabled();
ReactBrowserEventEmitter.setEnabled(false);
return currentlyEnabled;
},
/**
* @param {boolean} previouslyEnabled Enabled status of
* `ReactBrowserEventEmitter` before the reconciliation occurred. `close`
* restores the previous value.
*/
close: function(previouslyEnabled) {
ReactBrowserEventEmitter.setEnabled(previouslyEnabled);
}
};
/**
* Provides a queue for collecting `componentDidMount` and
* `componentDidUpdate` callbacks during the transaction.
*/
var ON_DOM_READY_QUEUEING = {
/**
* Initializes the internal `onDOMReady` queue.
*/
initialize: function() {
this.reactMountReady.reset();
},
/**
* After DOM is flushed, invoke all registered `onDOMReady` callbacks.
*/
close: function() {
this.reactMountReady.notifyAll();
}
};
var PUT_LISTENER_QUEUEING = {
initialize: function() {
this.putListenerQueue.reset();
},
close: function() {
this.putListenerQueue.putListeners();
}
};
/**
* Executed within the scope of the `Transaction` instance. Consider these as
* being member methods, but with an implied ordering while being isolated from
* each other.
*/
var TRANSACTION_WRAPPERS = [
PUT_LISTENER_QUEUEING,
SELECTION_RESTORATION,
EVENT_SUPPRESSION,
ON_DOM_READY_QUEUEING
];
/**
* Currently:
* - The order that these are listed in the transaction is critical:
* - Suppresses events.
* - Restores selection range.
*
* Future:
* - Restore document/overflow scroll positions that were unintentionally
* modified via DOM insertions above the top viewport boundary.
* - Implement/integrate with customized constraint based layout system and keep
* track of which dimensions must be remeasured.
*
* @class ReactReconcileTransaction
*/
function ReactReconcileTransaction() {
this.reinitializeTransaction();
// Only server-side rendering really needs this option (see
// `ReactServerRendering`), but server-side uses
// `ReactServerRenderingTransaction` instead. This option is here so that it's
// accessible and defaults to false when `ReactDOMComponent` and
// `ReactTextComponent` check it in `mountComponent`.
this.renderToStaticMarkup = false;
this.reactMountReady = CallbackQueue.getPooled(null);
this.putListenerQueue = ReactPutListenerQueue.getPooled();
}
var Mixin = {
/**
* @see Transaction
* @abstract
* @final
* @return {array<object>} List of operation wrap procedures.
* TODO: convert to array<TransactionWrapper>
*/
getTransactionWrappers: function() {
return TRANSACTION_WRAPPERS;
},
/**
* @return {object} The queue to collect `onDOMReady` callbacks with.
*/
getReactMountReady: function() {
return this.reactMountReady;
},
getPutListenerQueue: function() {
return this.putListenerQueue;
},
/**
* `PooledClass` looks for this, and will invoke this before allowing this
* instance to be reused.
*/
destructor: function() {
CallbackQueue.release(this.reactMountReady);
this.reactMountReady = null;
ReactPutListenerQueue.release(this.putListenerQueue);
this.putListenerQueue = null;
}
};
assign(ReactReconcileTransaction.prototype, Transaction.Mixin, Mixin);
PooledClass.addPoolingTo(ReactReconcileTransaction);
module.exports = ReactReconcileTransaction;
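// Lifecycle sketch (illustrative): a reconcile transaction is acquired from
// the pool, work runs inside perform() so the wrappers above fire around it
// in order (put listeners, restore selection, re-enable events, flush the
// onDOMReady queue), and the instance is released. mountAll is hypothetical.
//
//   var transaction = ReactReconcileTransaction.getPooled();
//   transaction.perform(function mountAll() {
//     // DOM mutations happen here with browser events suppressed
//   });
//   ReactReconcileTransaction.release(transaction);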
},{"./CallbackQueue":11,"./Object.assign":33,"./PooledClass":34,"./ReactBrowserEventEmitter":37,"./ReactInputSelection":75,"./ReactPutListenerQueue":91,"./Transaction":121}],93:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactReconciler
*/
'use strict';
var ReactRef = require("./ReactRef");
var ReactElementValidator = require("./ReactElementValidator");
/**
* Helper to call ReactRef.attachRefs with this composite component, split out
* to avoid allocations in the transaction mount-ready queue.
*/
function attachRefs() {
ReactRef.attachRefs(this, this._currentElement);
}
var ReactReconciler = {
/**
* Initializes the component, renders markup, and registers event listeners.
*
* @param {ReactComponent} internalInstance
* @param {string} rootID DOM ID of the root node.
* @param {ReactReconcileTransaction|ReactServerRenderingTransaction} transaction
* @return {?string} Rendered markup to be inserted into the DOM.
* @final
* @internal
*/
mountComponent: function(internalInstance, rootID, transaction, context) {
var markup = internalInstance.mountComponent(rootID, transaction, context);
if ("production" !== "production") {
ReactElementValidator.checkAndWarnForMutatedProps(
internalInstance._currentElement
);
}
transaction.getReactMountReady().enqueue(attachRefs, internalInstance);
return markup;
},
/**
* Releases any resources allocated by `mountComponent`.
*
* @final
* @internal
*/
unmountComponent: function(internalInstance) {
ReactRef.detachRefs(internalInstance, internalInstance._currentElement);
internalInstance.unmountComponent();
},
/**
* Update a component using a new element.
*
* @param {ReactComponent} internalInstance
* @param {ReactElement} nextElement
* @param {ReactReconcileTransaction} transaction
* @param {object} context
* @internal
*/
receiveComponent: function(
internalInstance, nextElement, transaction, context
) {
var prevElement = internalInstance._currentElement;
if (nextElement === prevElement && nextElement._owner != null) {
// Since elements are immutable after the owner is rendered,
// we can do a cheap identity compare here to determine if this is a
// superfluous reconcile. It's possible for state to be mutable but such
// change should trigger an update of the owner which would recreate
// the element. We explicitly check for the existence of an owner since
// it's possible for an element created outside a composite to be
// deeply mutated and reused.
return;
}
if ("production" !== "production") {
ReactElementValidator.checkAndWarnForMutatedProps(nextElement);
}
var refsChanged = ReactRef.shouldUpdateRefs(
prevElement,
nextElement
);
if (refsChanged) {
ReactRef.detachRefs(internalInstance, prevElement);
}
internalInstance.receiveComponent(nextElement, transaction, context);
if (refsChanged) {
transaction.getReactMountReady().enqueue(attachRefs, internalInstance);
}
},
/**
* Flush any dirty changes in a component.
*
* @param {ReactComponent} internalInstance
* @param {ReactReconcileTransaction} transaction
* @internal
*/
performUpdateIfNecessary: function(
internalInstance,
transaction
) {
internalInstance.performUpdateIfNecessary(transaction);
}
};
module.exports = ReactReconciler;
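// Flow sketch (illustrative): ReactReconciler is the dispatch layer between
// transactions and internal component instances. A mount/update/unmount cycle
// looks roughly like this; internalInstance and nextElement are hypothetical.
//
//   var markup = ReactReconciler.mountComponent(
//     internalInstance, '.0', transaction, {});        // initial render
//   ReactReconciler.receiveComponent(
//     internalInstance, nextElement, transaction, {}); // update; refs re-attach if changed
//   ReactReconciler.unmountComponent(internalInstance); // detaches refs first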
},{"./ReactElementValidator":68,"./ReactRef":94}],94:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactRef
*/
'use strict';
var ReactOwner = require("./ReactOwner");
var ReactRef = {};
function attachRef(ref, component, owner) {
if (typeof ref === 'function') {
ref(component.getPublicInstance());
} else {
// Legacy ref
ReactOwner.addComponentAsRefTo(component, ref, owner);
}
}
function detachRef(ref, component, owner) {
if (typeof ref === 'function') {
ref(null);
} else {
// Legacy ref
ReactOwner.removeComponentAsRefFrom(component, ref, owner);
}
}
ReactRef.attachRefs = function(instance, element) {
var ref = element.ref;
if (ref != null) {
attachRef(ref, instance, element._owner);
}
};
ReactRef.shouldUpdateRefs = function(prevElement, nextElement) {
// If either the owner or a `ref` has changed, make sure the newest owner
// has stored a reference to `this`, and the previous owner (if different)
// has forgotten the reference to `this`. We use the element instead
// of the public this.props because the post processing cannot determine
// a ref. The ref conceptually lives on the element.
// TODO: Should this even be possible? The owner cannot change because
// it's forbidden by shouldUpdateReactComponent. The ref can change
// if you swap the keys of two children but not their refs. Reconsider where this check
// is made. It probably belongs where the key checking and
// instantiateReactComponent is done.
return (
nextElement._owner !== prevElement._owner ||
nextElement.ref !== prevElement.ref
);
};
ReactRef.detachRefs = function(instance, element) {
var ref = element.ref;
if (ref != null) {
detachRef(ref, instance, element._owner);
}
};
module.exports = ReactRef;
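// Example (illustrative): the two ref styles handled by attachRef/detachRef.
// A callback ref receives the public instance (or null on detach); a legacy
// string ref registers the instance on its owner via ReactOwner.
//
//   React.createElement('div', {ref: function(inst) { /* inst or null */ }});
//   React.createElement('div', {ref: 'myDiv'});  // legacy: owner.refs.myDiv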
},{"./ReactOwner":85}],95:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactRootIndex
* @typechecks
*/
'use strict';
var ReactRootIndexInjection = {
/**
* @param {function} _createReactRootIndex
*/
injectCreateReactRootIndex: function(_createReactRootIndex) {
ReactRootIndex.createReactRootIndex = _createReactRootIndex;
}
};
var ReactRootIndex = {
createReactRootIndex: null,
injection: ReactRootIndexInjection
};
module.exports = ReactRootIndex;
},{}],96:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @typechecks static-only
* @providesModule ReactServerRendering
*/
'use strict';
var ReactElement = require("./ReactElement");
var ReactInstanceHandles = require("./ReactInstanceHandles");
var ReactMarkupChecksum = require("./ReactMarkupChecksum");
var ReactServerRenderingTransaction =
require("./ReactServerRenderingTransaction");
var emptyObject = require("./emptyObject");
var instantiateReactComponent = require("./instantiateReactComponent");
var invariant = require("./invariant");
/**
* @param {ReactElement} element
* @return {string} the HTML markup
*/
function renderToString(element) {
("production" !== "production" ? invariant(
ReactElement.isValidElement(element),
'renderToString(): You must pass a valid ReactElement.'
) : invariant(ReactElement.isValidElement(element)));
var transaction;
try {
var id = ReactInstanceHandles.createReactRootID();
transaction = ReactServerRenderingTransaction.getPooled(false);
return transaction.perform(function() {
var componentInstance = instantiateReactComponent(element, null);
var markup =
componentInstance.mountComponent(id, transaction, emptyObject);
return ReactMarkupChecksum.addChecksumToMarkup(markup);
}, null);
} finally {
ReactServerRenderingTransaction.release(transaction);
}
}
/**
* @param {ReactElement} element
* @return {string} the HTML markup, without the extra React ID and checksum
* (for generating static pages)
*/
function renderToStaticMarkup(element) {
("production" !== "production" ? invariant(
ReactElement.isValidElement(element),
'renderToStaticMarkup(): You must pass a valid ReactElement.'
) : invariant(ReactElement.isValidElement(element)));
var transaction;
try {
var id = ReactInstanceHandles.createReactRootID();
transaction = ReactServerRenderingTransaction.getPooled(true);
return transaction.perform(function() {
var componentInstance = instantiateReactComponent(element, null);
return componentInstance.mountComponent(id, transaction, emptyObject);
}, null);
} finally {
ReactServerRenderingTransaction.release(transaction);
}
}
module.exports = {
renderToString: renderToString,
renderToStaticMarkup: renderToStaticMarkup
};
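// Usage sketch (illustrative): renderToString emits markup carrying React IDs
// plus a checksum so a client render can adopt it; renderToStaticMarkup omits
// both, which suits purely static pages.
//
//   var html = ReactServerRendering.renderToString(
//     React.createElement('div', null, 'hello'));   // includes data-reactid + checksum
//   var plain = ReactServerRendering.renderToStaticMarkup(
//     React.createElement('div', null, 'hello'));   // '<div>hello</div>'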
},{"./ReactElement":67,"./ReactInstanceHandles":76,"./ReactMarkupChecksum":80,"./ReactServerRenderingTransaction":97,"./emptyObject":135,"./instantiateReactComponent":154,"./invariant":155}],97:[function(require,module,exports){
/**
* Copyright 2014-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactServerRenderingTransaction
* @typechecks
*/
'use strict';
var PooledClass = require("./PooledClass");
var CallbackQueue = require("./CallbackQueue");
var ReactPutListenerQueue = require("./ReactPutListenerQueue");
var Transaction = require("./Transaction");
var assign = require("./Object.assign");
var emptyFunction = require("./emptyFunction");
/**
* Provides a `CallbackQueue` queue for collecting `onDOMReady` callbacks
* during the performing of the transaction.
*/
var ON_DOM_READY_QUEUEING = {
/**
* Initializes the internal `onDOMReady` queue.
*/
initialize: function() {
this.reactMountReady.reset();
},
close: emptyFunction
};
var PUT_LISTENER_QUEUEING = {
initialize: function() {
this.putListenerQueue.reset();
},
close: emptyFunction
};
/**
* Executed within the scope of the `Transaction` instance. Consider these as
* being member methods, but with an implied ordering while being isolated from
* each other.
*/
var TRANSACTION_WRAPPERS = [
PUT_LISTENER_QUEUEING,
ON_DOM_READY_QUEUEING
];
/**
* @class ReactServerRenderingTransaction
* @param {boolean} renderToStaticMarkup
*/
function ReactServerRenderingTransaction(renderToStaticMarkup) {
this.reinitializeTransaction();
this.renderToStaticMarkup = renderToStaticMarkup;
this.reactMountReady = CallbackQueue.getPooled(null);
this.putListenerQueue = ReactPutListenerQueue.getPooled();
}
var Mixin = {
/**
* @see Transaction
* @abstract
* @final
* @return {array} List of operation wrap procedures.
*/
getTransactionWrappers: function() {
return TRANSACTION_WRAPPERS;
},
/**
* @return {object} The queue to collect `onDOMReady` callbacks with.
*/
getReactMountReady: function() {
return this.reactMountReady;
},
getPutListenerQueue: function() {
return this.putListenerQueue;
},
/**
* `PooledClass` looks for this, and will invoke this before allowing this
* instance to be reused.
*/
destructor: function() {
CallbackQueue.release(this.reactMountReady);
this.reactMountReady = null;
ReactPutListenerQueue.release(this.putListenerQueue);
this.putListenerQueue = null;
}
};
assign(
ReactServerRenderingTransaction.prototype,
Transaction.Mixin,
Mixin
);
PooledClass.addPoolingTo(ReactServerRenderingTransaction);
module.exports = ReactServerRenderingTransaction;
},{"./CallbackQueue":11,"./Object.assign":33,"./PooledClass":34,"./ReactPutListenerQueue":91,"./Transaction":121,"./emptyFunction":134}],98:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactStateSetters
*/
'use strict';
var ReactStateSetters = {
/**
* Returns a function that calls the provided function, and uses the result
* of that to set the component's state.
*
* @param {ReactCompositeComponent} component
* @param {function} funcReturningState Returned callback uses this to
* determine how to update state.
* @return {function} callback that when invoked uses funcReturningState to
* determine the object literal to pass to setState.
*/
createStateSetter: function(component, funcReturningState) {
return function(a, b, c, d, e, f) {
var partialState = funcReturningState.call(component, a, b, c, d, e, f);
if (partialState) {
component.setState(partialState);
}
};
},
/**
* Returns a single-argument callback that can be used to update a single
* key in the component's state.
*
* Note: this is a memoized function, which makes it inexpensive to call.
*
* @param {ReactCompositeComponent} component
* @param {string} key The key in the state that you should update.
* @return {function} callback of 1 argument which calls setState() with
* the provided keyName and callback argument.
*/
createStateKeySetter: function(component, key) {
// Memoize the setters.
var cache = component.__keySetters || (component.__keySetters = {});
return cache[key] || (cache[key] = createStateKeySetter(component, key));
}
};
function createStateKeySetter(component, key) {
// Partial state is allocated outside of the function closure so it can be
// reused with every call, avoiding memory allocation when this function
// is called.
var partialState = {};
return function stateKeySetter(value) {
partialState[key] = value;
component.setState(partialState);
};
}
ReactStateSetters.Mixin = {
/**
* Returns a function that calls the provided function, and uses the result
* of that to set the component's state.
*
* For example, these statements are equivalent:
*
* this.setState({x: 1});
* this.createStateSetter(function(xValue) {
* return {x: xValue};
* })(1);
*
* @param {function} funcReturningState Returned callback uses this to
* determine how to update state.
* @return {function} callback that when invoked uses funcReturningState to
* determine the object literal to pass to setState.
*/
createStateSetter: function(funcReturningState) {
return ReactStateSetters.createStateSetter(this, funcReturningState);
},
/**
* Returns a single-argument callback that can be used to update a single
* key in the component's state.
*
* For example, these statements are equivalent:
*
* this.setState({x: 1});
* this.createStateKeySetter('x')(1);
*
* Note: this is a memoized function, which makes it inexpensive to call.
*
* @param {string} key The key in the state that you should update.
* @return {function} callback of 1 argument which calls setState() with
* the provided keyName and callback argument.
*/
createStateKeySetter: function(key) {
return ReactStateSetters.createStateKeySetter(this, key);
}
};
module.exports = ReactStateSetters;
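// Usage sketch (illustrative): with ReactStateSetters.Mixin mixed into a
// createClass component, a key setter can be handed to children as a plain
// callback; repeated lookups return the same memoized function.
//
//   var setX = this.createStateKeySetter('x');
//   setX(1);                                   // same as this.setState({x: 1})
//   setX === this.createStateKeySetter('x');   // true: cached per key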
},{}],99:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactTestUtils
*/
'use strict';
var EventConstants = require("./EventConstants");
var EventPluginHub = require("./EventPluginHub");
var EventPropagators = require("./EventPropagators");
var React = require("./React");
var ReactElement = require("./ReactElement");
var ReactEmptyComponent = require("./ReactEmptyComponent");
var ReactBrowserEventEmitter = require("./ReactBrowserEventEmitter");
var ReactCompositeComponent = require("./ReactCompositeComponent");
var ReactInstanceHandles = require("./ReactInstanceHandles");
var ReactInstanceMap = require("./ReactInstanceMap");
var ReactMount = require("./ReactMount");
var ReactUpdates = require("./ReactUpdates");
var SyntheticEvent = require("./SyntheticEvent");
var assign = require("./Object.assign");
var emptyObject = require("./emptyObject");
var topLevelTypes = EventConstants.topLevelTypes;
function Event(suffix) {}
/**
* @class ReactTestUtils
*/
/**
* Todo: Support the entire DOM.scry query syntax. For now, these simple
* utilities will suffice for testing purposes.
* @lends ReactTestUtils
*/
var ReactTestUtils = {
renderIntoDocument: function(instance) {
var div = document.createElement('div');
// None of our tests actually require attaching the container to the
// DOM, and doing so creates a mess that we rely on test isolation to
// clean up, so we're going to stop honoring the name of this method
// (and probably rename it eventually) if no problems arise.
// document.documentElement.appendChild(div);
return React.render(instance, div);
},
isElement: function(element) {
return ReactElement.isValidElement(element);
},
isElementOfType: function(inst, convenienceConstructor) {
return (
ReactElement.isValidElement(inst) &&
inst.type === convenienceConstructor
);
},
isDOMComponent: function(inst) {
// TODO: Fix this heuristic. It's just here because composites can currently
// pretend to be DOM components.
return !!(inst && inst.tagName && inst.getDOMNode);
},
isDOMComponentElement: function(inst) {
return !!(inst &&
ReactElement.isValidElement(inst) &&
!!inst.tagName);
},
isCompositeComponent: function(inst) {
return typeof inst.render === 'function' &&
typeof inst.setState === 'function';
},
isCompositeComponentWithType: function(inst, type) {
return !!(ReactTestUtils.isCompositeComponent(inst) &&
(inst.constructor === type));
},
isCompositeComponentElement: function(inst) {
if (!ReactElement.isValidElement(inst)) {
return false;
}
// We check the prototype of the type that will get mounted, not the
// instance itself. This is a future proof way of duck typing.
var prototype = inst.type.prototype;
return (
typeof prototype.render === 'function' &&
typeof prototype.setState === 'function'
);
},
isCompositeComponentElementWithType: function(inst, type) {
return !!(ReactTestUtils.isCompositeComponentElement(inst) &&
(inst.constructor === type));
},
getRenderedChildOfCompositeComponent: function(inst) {
if (!ReactTestUtils.isCompositeComponent(inst)) {
return null;
}
var internalInstance = ReactInstanceMap.get(inst);
return internalInstance._renderedComponent.getPublicInstance();
},
findAllInRenderedTree: function(inst, test) {
if (!inst) {
return [];
}
var ret = test(inst) ? [inst] : [];
if (ReactTestUtils.isDOMComponent(inst)) {
var internalInstance = ReactInstanceMap.get(inst);
var renderedChildren = internalInstance
._renderedComponent
._renderedChildren;
var key;
for (key in renderedChildren) {
if (!renderedChildren.hasOwnProperty(key)) {
continue;
}
if (!renderedChildren[key].getPublicInstance) {
continue;
}
ret = ret.concat(
ReactTestUtils.findAllInRenderedTree(
renderedChildren[key].getPublicInstance(),
test
)
);
}
} else if (ReactTestUtils.isCompositeComponent(inst)) {
ret = ret.concat(
ReactTestUtils.findAllInRenderedTree(
ReactTestUtils.getRenderedChildOfCompositeComponent(inst),
test
)
);
}
return ret;
},
/**
* Finds all instances of components in the rendered tree that are DOM
* components with the class name matching `className`.
* @return an array of all the matches.
*/
scryRenderedDOMComponentsWithClass: function(root, className) {
return ReactTestUtils.findAllInRenderedTree(root, function(inst) {
var instClassName = inst.props.className;
return ReactTestUtils.isDOMComponent(inst) && (
(instClassName && (' ' + instClassName + ' ').indexOf(' ' + className + ' ') !== -1)
);
});
},
/**
* Like scryRenderedDOMComponentsWithClass but expects there to be one result,
* and returns that one result, or throws exception if there is any other
* number of matches besides one.
* @return {!ReactDOMComponent} The one match.
*/
findRenderedDOMComponentWithClass: function(root, className) {
var all =
ReactTestUtils.scryRenderedDOMComponentsWithClass(root, className);
if (all.length !== 1) {
throw new Error('Did not find exactly one match ' +
'(found: ' + all.length + ') for class:' + className
);
}
return all[0];
},
/**
* Finds all instances of components in the rendered tree that are DOM
* components with the tag name matching `tagName`.
* @return an array of all the matches.
*/
scryRenderedDOMComponentsWithTag: function(root, tagName) {
return ReactTestUtils.findAllInRenderedTree(root, function(inst) {
return ReactTestUtils.isDOMComponent(inst) &&
inst.tagName === tagName.toUpperCase();
});
},
/**
* Like scryRenderedDOMComponentsWithTag but expects there to be one result,
* and returns that one result, or throws exception if there is any other
* number of matches besides one.
* @return {!ReactDOMComponent} The one match.
*/
findRenderedDOMComponentWithTag: function(root, tagName) {
var all = ReactTestUtils.scryRenderedDOMComponentsWithTag(root, tagName);
if (all.length !== 1) {
throw new Error('Did not find exactly one match for tag:' + tagName);
}
return all[0];
},
/**
* Finds all instances of components with type equal to `componentType`.
* @return an array of all the matches.
*/
scryRenderedComponentsWithType: function(root, componentType) {
return ReactTestUtils.findAllInRenderedTree(root, function(inst) {
return ReactTestUtils.isCompositeComponentWithType(
inst,
componentType
);
});
},
/**
* Same as `scryRenderedComponentsWithType` but expects there to be one result
* and returns that one result, or throws exception if there is any other
* number of matches besides one.
* @return {!ReactComponent} The one match.
*/
findRenderedComponentWithType: function(root, componentType) {
var all = ReactTestUtils.scryRenderedComponentsWithType(
root,
componentType
);
if (all.length !== 1) {
throw new Error(
'Did not find exactly one match for componentType:' + componentType
);
}
return all[0];
},
/**
* Pass a mocked component module to this method to augment it with
* useful methods that allow it to be used as a dummy React component.
* Instead of rendering as usual, the component will become a simple
* <div> containing any provided children.
*
* @param {object} module the mock function object exported from a
* module that defines the component to be mocked
* @param {?string} mockTagName optional dummy root tag name to return
* from render method (overrides
* module.mockTagName if provided)
* @return {object} the ReactTestUtils object (for chaining)
*/
mockComponent: function(module, mockTagName) {
mockTagName = mockTagName || module.mockTagName || "div";
module.prototype.render.mockImplementation(function() {
return React.createElement(
mockTagName,
null,
this.props.children
);
});
return this;
},
/**
* Simulates a top level event being dispatched from a raw event that occurred
* on an `Element` node.
* @param topLevelType {Object} A type from `EventConstants.topLevelTypes`
* @param {!Element} node The dom to simulate an event occurring on.
* @param {?Event} fakeNativeEvent Fake native event to use in SyntheticEvent.
*/
simulateNativeEventOnNode: function(topLevelType, node, fakeNativeEvent) {
fakeNativeEvent.target = node;
ReactBrowserEventEmitter.ReactEventListener.dispatchEvent(
topLevelType,
fakeNativeEvent
);
},
/**
* Simulates a top level event being dispatched from a raw event that occurred
* on the `ReactDOMComponent` `comp`.
* @param topLevelType {Object} A type from `EventConstants.topLevelTypes`.
* @param comp {!ReactDOMComponent}
* @param {?Event} fakeNativeEvent Fake native event to use in SyntheticEvent.
*/
simulateNativeEventOnDOMComponent: function(
topLevelType,
comp,
fakeNativeEvent) {
ReactTestUtils.simulateNativeEventOnNode(
topLevelType,
comp.getDOMNode(),
fakeNativeEvent
);
},
nativeTouchData: function(x, y) {
return {
touches: [
{pageX: x, pageY: y}
]
};
},
createRenderer: function() {
return new ReactShallowRenderer();
},
Simulate: null,
SimulateNative: {}
};
/**
* @class ReactShallowRenderer
*/
var ReactShallowRenderer = function() {
this._instance = null;
};
ReactShallowRenderer.prototype.getRenderOutput = function() {
return (
(this._instance && this._instance._renderedComponent &&
this._instance._renderedComponent._renderedOutput)
|| null
);
};
var NoopInternalComponent = function(element) {
this._renderedOutput = element;
this._currentElement = element === null || element === false ?
ReactEmptyComponent.emptyElement :
element;
};
NoopInternalComponent.prototype = {
mountComponent: function() {
},
receiveComponent: function(element) {
this._renderedOutput = element;
this._currentElement = element === null || element === false ?
ReactEmptyComponent.emptyElement :
element;
},
unmountComponent: function() {
}
};
var ShallowComponentWrapper = function() { };
assign(
ShallowComponentWrapper.prototype,
ReactCompositeComponent.Mixin, {
_instantiateReactComponent: function(element) {
return new NoopInternalComponent(element);
},
_replaceNodeWithMarkupByID: function() {},
_renderValidatedComponent:
ReactCompositeComponent.Mixin.
_renderValidatedComponentWithoutOwnerOrContext
}
);
ReactShallowRenderer.prototype.render = function(element, context) {
if (!context) {
context = emptyObject;
}
var transaction = ReactUpdates.ReactReconcileTransaction.getPooled();
this._render(element, transaction, context);
ReactUpdates.ReactReconcileTransaction.release(transaction);
};
ReactShallowRenderer.prototype.unmount = function() {
if (this._instance) {
this._instance.unmountComponent();
}
};
ReactShallowRenderer.prototype._render = function(element, transaction, context) {
if (!this._instance) {
var rootID = ReactInstanceHandles.createReactRootID();
var instance = new ShallowComponentWrapper(element.type);
instance.construct(element);
instance.mountComponent(rootID, transaction, context);
this._instance = instance;
} else {
this._instance.receiveComponent(element, transaction, context);
}
};
/**
* Exports:
*
* - `ReactTestUtils.Simulate.click(Element/ReactDOMComponent)`
* - `ReactTestUtils.Simulate.mouseMove(Element/ReactDOMComponent)`
* - `ReactTestUtils.Simulate.change(Element/ReactDOMComponent)`
* - ... (All keys from event plugin `eventTypes` objects)
*/
function makeSimulator(eventType) {
return function(domComponentOrNode, eventData) {
var node;
if (ReactTestUtils.isDOMComponent(domComponentOrNode)) {
node = domComponentOrNode.getDOMNode();
} else if (domComponentOrNode.tagName) {
node = domComponentOrNode;
}
var fakeNativeEvent = new Event();
fakeNativeEvent.target = node;
// We don't use SyntheticEvent.getPooled in order to not have to worry about
// properly destroying any properties assigned from `eventData` upon release
var event = new SyntheticEvent(
ReactBrowserEventEmitter.eventNameDispatchConfigs[eventType],
ReactMount.getID(node),
fakeNativeEvent
);
assign(event, eventData);
EventPropagators.accumulateTwoPhaseDispatches(event);
ReactUpdates.batchedUpdates(function() {
EventPluginHub.enqueueEvents(event);
EventPluginHub.processEventQueue();
});
};
}
function buildSimulators() {
ReactTestUtils.Simulate = {};
var eventType;
for (eventType in ReactBrowserEventEmitter.eventNameDispatchConfigs) {
/**
* @param {!Element || ReactDOMComponent} domComponentOrNode
* @param {?object} eventData Fake event data to use in SyntheticEvent.
*/
ReactTestUtils.Simulate[eventType] = makeSimulator(eventType);
}
}
// Rebuild ReactTestUtils.Simulate whenever event plugins are injected
var oldInjectEventPluginOrder = EventPluginHub.injection.injectEventPluginOrder;
EventPluginHub.injection.injectEventPluginOrder = function() {
oldInjectEventPluginOrder.apply(this, arguments);
buildSimulators();
};
var oldInjectEventPlugins = EventPluginHub.injection.injectEventPluginsByName;
EventPluginHub.injection.injectEventPluginsByName = function() {
oldInjectEventPlugins.apply(this, arguments);
buildSimulators();
};
buildSimulators();
/**
* Exports:
*
* - `ReactTestUtils.SimulateNative.click(Element/ReactDOMComponent)`
* - `ReactTestUtils.SimulateNative.mouseMove(Element/ReactDOMComponent)`
* - `ReactTestUtils.SimulateNative.mouseIn(Element/ReactDOMComponent)`
* - `ReactTestUtils.SimulateNative.mouseOut(Element/ReactDOMComponent)`
* - ... (All keys from `EventConstants.topLevelTypes`)
*
* Note: Top level event types are a subset of the entire set of handler types
* (which include a broader set of "synthetic" events). For example, onDragDone
* is a synthetic event. Except when testing an event plugin or React's event
* handling code specifically, you probably want to use ReactTestUtils.Simulate
* to dispatch synthetic events.
*/
function makeNativeSimulator(eventType) {
return function(domComponentOrNode, nativeEventData) {
var fakeNativeEvent = new Event(eventType);
assign(fakeNativeEvent, nativeEventData);
if (ReactTestUtils.isDOMComponent(domComponentOrNode)) {
ReactTestUtils.simulateNativeEventOnDOMComponent(
eventType,
domComponentOrNode,
fakeNativeEvent
);
} else if (!!domComponentOrNode.tagName) {
// Will allow on actual dom nodes.
ReactTestUtils.simulateNativeEventOnNode(
eventType,
domComponentOrNode,
fakeNativeEvent
);
}
};
}
var eventType;
for (eventType in topLevelTypes) {
// Event type is stored as 'topClick' - we transform that to 'click'
var convenienceName = eventType.indexOf('top') === 0 ?
eventType.charAt(3).toLowerCase() + eventType.substr(4) : eventType;
/**
* @param {!Element || ReactDOMComponent} domComponentOrNode
* @param {?Event} nativeEventData Fake native event to use in SyntheticEvent.
*/
ReactTestUtils.SimulateNative[convenienceName] =
makeNativeSimulator(eventType);
}
module.exports = ReactTestUtils;
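// Usage sketch (illustrative): render into a detached node, find a rendered
// DOM component, and dispatch a synthetic click through the event system.
// MyComponent is a hypothetical composite component.
//
//   var inst = ReactTestUtils.renderIntoDocument(
//     React.createElement(MyComponent));
//   var button = ReactTestUtils.findRenderedDOMComponentWithTag(inst, 'button');
//   ReactTestUtils.Simulate.click(button);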
},{"./EventConstants":20,"./EventPluginHub":22,"./EventPropagators":25,"./Object.assign":33,"./React":35,"./ReactBrowserEventEmitter":37,"./ReactCompositeComponent":47,"./ReactElement":67,"./ReactEmptyComponent":69,"./ReactInstanceHandles":76,"./ReactInstanceMap":77,"./ReactMount":81,"./ReactUpdates":104,"./SyntheticEvent":113,"./emptyObject":135}],100:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @typechecks static-only
* @providesModule ReactTransitionChildMapping
*/
'use strict';
var ReactChildren = require("./ReactChildren");
var ReactFragment = require("./ReactFragment");
var ReactTransitionChildMapping = {
/**
* Given `this.props.children`, return an object mapping key to child. Just
* simple syntactic sugar around ReactChildren.map().
*
* @param {*} children `this.props.children`
* @return {object} Mapping of key to child
*/
getChildMapping: function(children) {
if (!children) {
return children;
}
return ReactFragment.extract(ReactChildren.map(children, function(child) {
return child;
}));
},
/**
* When you're adding or removing children, some may be added or removed in the
* same render pass. We want to show *both* since we want to simultaneously
* animate elements in and out. This function takes a previous set of keys
* and a new set of keys and merges them with its best guess of the correct
* ordering. In the future we may expose some of the utilities in
* ReactMultiChild to make this easy, but for now React itself does not
* directly have this concept of the union of prevChildren and nextChildren
* so we implement it here.
*
* @param {object} prev prev children as returned from
* `ReactTransitionChildMapping.getChildMapping()`.
* @param {object} next next children as returned from
* `ReactTransitionChildMapping.getChildMapping()`.
* @return {object} a key set that contains all keys in `prev` and all keys
* in `next` in a reasonable order.
*/
mergeChildMappings: function(prev, next) {
prev = prev || {};
next = next || {};
function getValueForKey(key) {
if (next.hasOwnProperty(key)) {
return next[key];
} else {
return prev[key];
}
}
// For each key of `next`, the list of keys to insert before that key in
// the combined list
var nextKeysPending = {};
var pendingKeys = [];
for (var prevKey in prev) {
if (next.hasOwnProperty(prevKey)) {
if (pendingKeys.length) {
nextKeysPending[prevKey] = pendingKeys;
pendingKeys = [];
}
} else {
pendingKeys.push(prevKey);
}
}
var i;
var childMapping = {};
for (var nextKey in next) {
if (nextKeysPending.hasOwnProperty(nextKey)) {
for (i = 0; i < nextKeysPending[nextKey].length; i++) {
var pendingNextKey = nextKeysPending[nextKey][i];
childMapping[nextKeysPending[nextKey][i]] = getValueForKey(
pendingNextKey
);
}
}
childMapping[nextKey] = getValueForKey(nextKey);
}
// Finally, add the keys which didn't appear before any key in `next`
for (i = 0; i < pendingKeys.length; i++) {
childMapping[pendingKeys[i]] = getValueForKey(pendingKeys[i]);
}
return childMapping;
}
};
module.exports = ReactTransitionChildMapping;
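// Worked example (illustrative): mergeChildMappings preserves the relative
// order of keys from both mappings, so removed children stay in place while
// they animate out.
//
//   ReactTransitionChildMapping.mergeChildMappings(
//     {a: 1, b: 2},   // prev
//     {b: 3, c: 4}    // next
//   );                // => {a: 1, b: 3, c: 4} (a kept in place before b)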
},{"./ReactChildren":41,"./ReactFragment":73}],101:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactTransitionEvents
*/
'use strict';
var ExecutionEnvironment = require("./ExecutionEnvironment");
/**
* EVENT_NAME_MAP is used to determine which event fired when a
* transition/animation ends, based on the style property used to
* define that event.
*/
var EVENT_NAME_MAP = {
transitionend: {
'transition': 'transitionend',
'WebkitTransition': 'webkitTransitionEnd',
'MozTransition': 'mozTransitionEnd',
'OTransition': 'oTransitionEnd',
'msTransition': 'MSTransitionEnd'
},
animationend: {
'animation': 'animationend',
'WebkitAnimation': 'webkitAnimationEnd',
'MozAnimation': 'mozAnimationEnd',
'OAnimation': 'oAnimationEnd',
'msAnimation': 'MSAnimationEnd'
}
};
var endEvents = [];
function detectEvents() {
var testEl = document.createElement('div');
var style = testEl.style;
// On some platforms, in particular some releases of Android 4.x,
// the un-prefixed "animation" and "transition" properties are defined on the
// style object but the events that fire will still be prefixed, so we need
// to check if the un-prefixed events are usable, and if not remove them
// from the map
if (!('AnimationEvent' in window)) {
delete EVENT_NAME_MAP.animationend.animation;
}
if (!('TransitionEvent' in window)) {
delete EVENT_NAME_MAP.transitionend.transition;
}
for (var baseEventName in EVENT_NAME_MAP) {
var baseEvents = EVENT_NAME_MAP[baseEventName];
for (var styleName in baseEvents) {
if (styleName in style) {
endEvents.push(baseEvents[styleName]);
break;
}
}
}
}
if (ExecutionEnvironment.canUseDOM) {
detectEvents();
}
// We use the raw {add|remove}EventListener() call because EventListener
// does not know how to remove event listeners and we really should
// clean up. Also, these events are not triggered in older browsers
// so we should be A-OK here.
function addEventListener(node, eventName, eventListener) {
node.addEventListener(eventName, eventListener, false);
}
function removeEventListener(node, eventName, eventListener) {
node.removeEventListener(eventName, eventListener, false);
}
var ReactTransitionEvents = {
addEndEventListener: function(node, eventListener) {
if (endEvents.length === 0) {
// If CSS transitions are not supported, trigger an "end animation"
// event immediately.
window.setTimeout(eventListener, 0);
return;
}
endEvents.forEach(function(endEvent) {
addEventListener(node, endEvent, eventListener);
});
},
removeEndEventListener: function(node, eventListener) {
if (endEvents.length === 0) {
return;
}
endEvents.forEach(function(endEvent) {
removeEventListener(node, endEvent, eventListener);
});
}
};
module.exports = ReactTransitionEvents;
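// Usage sketch (illustrative): attach a listener for whichever vendor-prefixed
// transition/animation end events the browser supports; when none are
// supported, the callback fires on the next tick instead.
//
//   ReactTransitionEvents.addEndEventListener(node, function onEnd(e) {
//     ReactTransitionEvents.removeEndEventListener(node, onEnd);
//     // transition finished (or fired immediately on unsupported browsers)
//   });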
},{"./ExecutionEnvironment":26}],102:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactTransitionGroup
*/
'use strict';
var React = require("./React");
var ReactTransitionChildMapping = require("./ReactTransitionChildMapping");
var assign = require("./Object.assign");
var cloneWithProps = require("./cloneWithProps");
var emptyFunction = require("./emptyFunction");
var ReactTransitionGroup = React.createClass({
displayName: 'ReactTransitionGroup',
propTypes: {
component: React.PropTypes.any,
childFactory: React.PropTypes.func
},
getDefaultProps: function() {
return {
component: 'span',
childFactory: emptyFunction.thatReturnsArgument
};
},
getInitialState: function() {
return {
children: ReactTransitionChildMapping.getChildMapping(this.props.children)
};
},
componentWillMount: function() {
this.currentlyTransitioningKeys = {};
this.keysToEnter = [];
this.keysToLeave = [];
},
componentDidMount: function() {
var initialChildMapping = this.state.children;
for (var key in initialChildMapping) {
if (initialChildMapping[key]) {
this.performAppear(key);
}
}
},
componentWillReceiveProps: function(nextProps) {
var nextChildMapping = ReactTransitionChildMapping.getChildMapping(
nextProps.children
);
var prevChildMapping = this.state.children;
this.setState({
children: ReactTransitionChildMapping.mergeChildMappings(
prevChildMapping,
nextChildMapping
)
});
var key;
for (key in nextChildMapping) {
var hasPrev = prevChildMapping && prevChildMapping.hasOwnProperty(key);
if (nextChildMapping[key] && !hasPrev &&
!this.currentlyTransitioningKeys[key]) {
this.keysToEnter.push(key);
}
}
for (key in prevChildMapping) {
var hasNext = nextChildMapping && nextChildMapping.hasOwnProperty(key);
if (prevChildMapping[key] && !hasNext &&
!this.currentlyTransitioningKeys[key]) {
this.keysToLeave.push(key);
}
}
// If we want to someday check for reordering, we could do it here.
},
componentDidUpdate: function() {
var keysToEnter = this.keysToEnter;
this.keysToEnter = [];
keysToEnter.forEach(this.performEnter);
var keysToLeave = this.keysToLeave;
this.keysToLeave = [];
keysToLeave.forEach(this.performLeave);
},
performAppear: function(key) {
this.currentlyTransitioningKeys[key] = true;
var component = this.refs[key];
if (component.componentWillAppear) {
component.componentWillAppear(
this._handleDoneAppearing.bind(this, key)
);
} else {
this._handleDoneAppearing(key);
}
},
_handleDoneAppearing: function(key) {
var component = this.refs[key];
if (component.componentDidAppear) {
component.componentDidAppear();
}
delete this.currentlyTransitioningKeys[key];
var currentChildMapping = ReactTransitionChildMapping.getChildMapping(
this.props.children
);
if (!currentChildMapping || !currentChildMapping.hasOwnProperty(key)) {
// This was removed before it had fully appeared. Remove it.
this.performLeave(key);
}
},
performEnter: function(key) {
this.currentlyTransitioningKeys[key] = true;
var component = this.refs[key];
if (component.componentWillEnter) {
component.componentWillEnter(
this._handleDoneEntering.bind(this, key)
);
} else {
this._handleDoneEntering(key);
}
},
_handleDoneEntering: function(key) {
var component = this.refs[key];
if (component.componentDidEnter) {
component.componentDidEnter();
}
delete this.currentlyTransitioningKeys[key];
var currentChildMapping = ReactTransitionChildMapping.getChildMapping(
this.props.children
);
if (!currentChildMapping || !currentChildMapping.hasOwnProperty(key)) {
// This was removed before it had fully entered. Remove it.
this.performLeave(key);
}
},
performLeave: function(key) {
this.currentlyTransitioningKeys[key] = true;
var component = this.refs[key];
if (component.componentWillLeave) {
component.componentWillLeave(this._handleDoneLeaving.bind(this, key));
} else {
// Note that this is somewhat dangerous b/c it calls setState()
// again, effectively mutating the component before all the work
// is done.
this._handleDoneLeaving(key);
}
},
_handleDoneLeaving: function(key) {
var component = this.refs[key];
if (component.componentDidLeave) {
component.componentDidLeave();
}
delete this.currentlyTransitioningKeys[key];
var currentChildMapping = ReactTransitionChildMapping.getChildMapping(
this.props.children
);
if (currentChildMapping && currentChildMapping.hasOwnProperty(key)) {
// This entered again before it fully left. Add it again.
this.performEnter(key);
} else {
var newChildren = assign({}, this.state.children);
delete newChildren[key];
this.setState({children: newChildren});
}
},
render: function() {
// TODO: we could get rid of the need for the wrapper node
// by cloning a single child
var childrenToRender = [];
for (var key in this.state.children) {
var child = this.state.children[key];
if (child) {
// You may need to apply reactive updates to a child as it is leaving.
// The normal React way to do it won't work since the child will have
// already been removed. In case you need this behavior you can provide
// a childFactory function to wrap every child, even the ones that are
// leaving.
childrenToRender.push(cloneWithProps(
this.props.childFactory(child),
{ref: key, key: key}
));
}
}
return React.createElement(
this.props.component,
this.props,
childrenToRender
);
}
});
module.exports = ReactTransitionGroup;
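// Usage sketch (illustrative, without JSX): children must be keyed so the
// group can track which keys enter and leave; each child may implement
// componentWillEnter/componentWillLeave etc. to drive its own animation.
// items and AnimatedItem below are hypothetical.
//
//   React.createElement(ReactTransitionGroup, {component: 'ul'},
//     items.map(function(item) {
//       return React.createElement(AnimatedItem, {key: item.id, item: item});
//     }));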
},{"./Object.assign":33,"./React":35,"./ReactTransitionChildMapping":100,"./cloneWithProps":127,"./emptyFunction":134}],103:[function(require,module,exports){
/**
* Copyright 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactUpdateQueue
*/
'use strict';
var ReactLifeCycle = require("./ReactLifeCycle");
var ReactCurrentOwner = require("./ReactCurrentOwner");
var ReactElement = require("./ReactElement");
var ReactInstanceMap = require("./ReactInstanceMap");
var ReactUpdates = require("./ReactUpdates");
var assign = require("./Object.assign");
var invariant = require("./invariant");
var warning = require("./warning");
function enqueueUpdate(internalInstance) {
if (internalInstance !== ReactLifeCycle.currentlyMountingInstance) {
// If we're in a componentWillMount handler, don't enqueue a rerender
// because ReactUpdates assumes we're in a browser context (which is
// wrong for server rendering) and we're about to do a render anyway.
// See bug in #1740.
ReactUpdates.enqueueUpdate(internalInstance);
}
}
function getInternalInstanceReadyForUpdate(publicInstance, callerName) {
("production" !== "production" ? invariant(
ReactCurrentOwner.current == null,
'%s(...): Cannot update during an existing state transition ' +
'(such as within `render`). Render methods should be a pure function ' +
'of props and state.',
callerName
) : invariant(ReactCurrentOwner.current == null));
var internalInstance = ReactInstanceMap.get(publicInstance);
if (!internalInstance) {
if ("production" !== "production") {
// Only warn when we have a callerName. Otherwise we should be silent.
// We're probably calling from enqueueCallback. We don't want to warn
// there because we already warned for the corresponding lifecycle method.
("production" !== "production" ? warning(
!callerName,
'%s(...): Can only update a mounted or mounting component. ' +
'This usually means you called %s() on an unmounted ' +
'component. This is a no-op.',
callerName,
callerName
) : null);
}
return null;
}
if (internalInstance === ReactLifeCycle.currentlyUnmountingInstance) {
return null;
}
return internalInstance;
}
/**
* ReactUpdateQueue allows for state updates to be scheduled into a later
* reconciliation step.
*/
var ReactUpdateQueue = {
/**
* Enqueue a callback that will be executed after all the pending updates
* have processed.
*
* @param {ReactClass} publicInstance The instance to use as `this` context.
* @param {?function} callback Called after state is updated.
* @internal
*/
enqueueCallback: function(publicInstance, callback) {
("production" !== "production" ? invariant(
typeof callback === 'function',
'enqueueCallback(...): You called `setProps`, `replaceProps`, ' +
'`setState`, `replaceState`, or `forceUpdate` with a callback that ' +
'isn\'t callable.'
) : invariant(typeof callback === 'function'));
var internalInstance = getInternalInstanceReadyForUpdate(publicInstance);
// Previously we would throw an error if we didn't have an internal
// instance. Since we want to make it a no-op instead, we mirror the same
// behavior we have in other enqueue* methods.
// We also need to ignore callbacks in componentWillMount. See
// enqueueUpdates.
if (!internalInstance ||
internalInstance === ReactLifeCycle.currentlyMountingInstance) {
return null;
}
if (internalInstance._pendingCallbacks) {
internalInstance._pendingCallbacks.push(callback);
} else {
internalInstance._pendingCallbacks = [callback];
}
// TODO: The callback here is ignored when setState is called from
// componentWillMount. Either fix it or disallow doing so completely in
// favor of getInitialState. Alternatively, we can disallow
// componentWillMount during server-side rendering.
enqueueUpdate(internalInstance);
},
enqueueCallbackInternal: function(internalInstance, callback) {
("production" !== "production" ? invariant(
typeof callback === 'function',
'enqueueCallback(...): You called `setProps`, `replaceProps`, ' +
'`setState`, `replaceState`, or `forceUpdate` with a callback that ' +
'isn\'t callable.'
) : invariant(typeof callback === 'function'));
if (internalInstance._pendingCallbacks) {
internalInstance._pendingCallbacks.push(callback);
} else {
internalInstance._pendingCallbacks = [callback];
}
enqueueUpdate(internalInstance);
},
/**
* Forces an update. This should only be invoked when it is known with
* certainty that we are **not** in a DOM transaction.
*
* You may want to call this when you know that some deeper aspect of the
* component's state has changed but `setState` was not called.
*
* This will not invoke `shouldComponentUpdate`, but it will invoke
* `componentWillUpdate` and `componentDidUpdate`.
*
* @param {ReactClass} publicInstance The instance that should rerender.
* @internal
*/
enqueueForceUpdate: function(publicInstance) {
var internalInstance = getInternalInstanceReadyForUpdate(
publicInstance,
'forceUpdate'
);
if (!internalInstance) {
return;
}
internalInstance._pendingForceUpdate = true;
enqueueUpdate(internalInstance);
},
/**
* Replaces all of the state. Always use this or `setState` to mutate state.
* You should treat `this.state` as immutable.
*
* There is no guarantee that `this.state` will be immediately updated, so
* accessing `this.state` after calling this method may return the old value.
*
* @param {ReactClass} publicInstance The instance that should rerender.
* @param {object} completeState Next state.
* @internal
*/
enqueueReplaceState: function(publicInstance, completeState) {
var internalInstance = getInternalInstanceReadyForUpdate(
publicInstance,
'replaceState'
);
if (!internalInstance) {
return;
}
internalInstance._pendingStateQueue = [completeState];
internalInstance._pendingReplaceState = true;
enqueueUpdate(internalInstance);
},
/**
* Sets a subset of the state. This only exists because _pendingState is
* internal. This provides a merging strategy that is not available to deep
* properties which is confusing. TODO: Expose pendingState or don't use it
* during the merge.
*
* @param {ReactClass} publicInstance The instance that should rerender.
* @param {object} partialState Next partial state to be merged with state.
* @internal
*/
enqueueSetState: function(publicInstance, partialState) {
var internalInstance = getInternalInstanceReadyForUpdate(
publicInstance,
'setState'
);
if (!internalInstance) {
return;
}
var queue =
internalInstance._pendingStateQueue ||
(internalInstance._pendingStateQueue = []);
queue.push(partialState);
enqueueUpdate(internalInstance);
},
/**
* Sets a subset of the props.
*
* @param {ReactClass} publicInstance The instance that should rerender.
* @param {object} partialProps Subset of the next props.
* @internal
*/
enqueueSetProps: function(publicInstance, partialProps) {
var internalInstance = getInternalInstanceReadyForUpdate(
publicInstance,
'setProps'
);
if (!internalInstance) {
return;
}
("production" !== "production" ? invariant(
internalInstance._isTopLevel,
'setProps(...): You called `setProps` on a ' +
'component with a parent. This is an anti-pattern since props will ' +
'get reactively updated when rendered. Instead, change the owner\'s ' +
'`render` method to pass the correct value as props to the component ' +
'where it is created.'
) : invariant(internalInstance._isTopLevel));
// Merge with the pending element if it exists, otherwise with existing
// element props.
var element = internalInstance._pendingElement ||
internalInstance._currentElement;
var props = assign({}, element.props, partialProps);
internalInstance._pendingElement = ReactElement.cloneAndReplaceProps(
element,
props
);
enqueueUpdate(internalInstance);
},
/**
* Replaces all of the props.
*
* @param {ReactClass} publicInstance The instance that should rerender.
* @param {object} props New props.
* @internal
*/
enqueueReplaceProps: function(publicInstance, props) {
var internalInstance = getInternalInstanceReadyForUpdate(
publicInstance,
'replaceProps'
);
if (!internalInstance) {
return;
}
("production" !== "production" ? invariant(
internalInstance._isTopLevel,
'replaceProps(...): You called `replaceProps` on a ' +
'component with a parent. This is an anti-pattern since props will ' +
'get reactively updated when rendered. Instead, change the owner\'s ' +
'`render` method to pass the correct value as props to the component ' +
'where it is created.'
) : invariant(internalInstance._isTopLevel));
// Merge with the pending element if it exists, otherwise with existing
// element props.
var element = internalInstance._pendingElement ||
internalInstance._currentElement;
internalInstance._pendingElement = ReactElement.cloneAndReplaceProps(
element,
props
);
enqueueUpdate(internalInstance);
},
enqueueElementInternal: function(internalInstance, newElement) {
internalInstance._pendingElement = newElement;
enqueueUpdate(internalInstance);
}
};
module.exports = ReactUpdateQueue;
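// Semantics sketch (illustrative): successive setState calls push partial
// states onto _pendingStateQueue and are merged in order during the next
// flush, so within one batch the last write to a key wins. inst is a
// hypothetical mounted component.
//
//   ReactUpdateQueue.enqueueSetState(inst, {x: 1});
//   ReactUpdateQueue.enqueueSetState(inst, {x: 2, y: 3});
//   // after the flush: inst.state includes {x: 2, y: 3}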
},{"./Object.assign":33,"./ReactCurrentOwner":49,"./ReactElement":67,"./ReactInstanceMap":77,"./ReactLifeCycle":78,"./ReactUpdates":104,"./invariant":155,"./warning":176}],104:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactUpdates
*/
'use strict';
var CallbackQueue = require("./CallbackQueue");
var PooledClass = require("./PooledClass");
var ReactCurrentOwner = require("./ReactCurrentOwner");
var ReactPerf = require("./ReactPerf");
var ReactReconciler = require("./ReactReconciler");
var Transaction = require("./Transaction");
var assign = require("./Object.assign");
var invariant = require("./invariant");
var warning = require("./warning");
var dirtyComponents = [];
var asapCallbackQueue = CallbackQueue.getPooled();
var asapEnqueued = false;
var batchingStrategy = null;
function ensureInjected() {
("production" !== "production" ? invariant(
ReactUpdates.ReactReconcileTransaction && batchingStrategy,
'ReactUpdates: must inject a reconcile transaction class and batching ' +
'strategy'
) : invariant(ReactUpdates.ReactReconcileTransaction && batchingStrategy));
}
var NESTED_UPDATES = {
initialize: function() {
this.dirtyComponentsLength = dirtyComponents.length;
},
close: function() {
if (this.dirtyComponentsLength !== dirtyComponents.length) {
// Additional updates were enqueued by componentDidUpdate handlers or
// similar; before our own UPDATE_QUEUEING wrapper closes, we want to run
// these new updates so that if A's componentDidUpdate calls setState on
// B, B will update before the callback A's updater provided when calling
// setState.
dirtyComponents.splice(0, this.dirtyComponentsLength);
flushBatchedUpdates();
} else {
dirtyComponents.length = 0;
}
}
};
var UPDATE_QUEUEING = {
initialize: function() {
this.callbackQueue.reset();
},
close: function() {
this.callbackQueue.notifyAll();
}
};
var TRANSACTION_WRAPPERS = [NESTED_UPDATES, UPDATE_QUEUEING];
function ReactUpdatesFlushTransaction() {
this.reinitializeTransaction();
this.dirtyComponentsLength = null;
this.callbackQueue = CallbackQueue.getPooled();
this.reconcileTransaction =
ReactUpdates.ReactReconcileTransaction.getPooled();
}
assign(
ReactUpdatesFlushTransaction.prototype,
Transaction.Mixin, {
getTransactionWrappers: function() {
return TRANSACTION_WRAPPERS;
},
destructor: function() {
this.dirtyComponentsLength = null;
CallbackQueue.release(this.callbackQueue);
this.callbackQueue = null;
ReactUpdates.ReactReconcileTransaction.release(this.reconcileTransaction);
this.reconcileTransaction = null;
},
perform: function(method, scope, a) {
// Essentially calls `this.reconcileTransaction.perform(method, scope, a)`
// with this transaction's wrappers around it.
return Transaction.Mixin.perform.call(
this,
this.reconcileTransaction.perform,
this.reconcileTransaction,
method,
scope,
a
);
}
});
PooledClass.addPoolingTo(ReactUpdatesFlushTransaction);
function batchedUpdates(callback, a, b, c, d) {
ensureInjected();
batchingStrategy.batchedUpdates(callback, a, b, c, d);
}
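// Usage sketch (illustrative): code that triggers several state updates can
// wrap them in batchedUpdates (exported as ReactUpdates.batchedUpdates) so
// they flush in a single reconcile pass via the injected batching strategy.
// componentA and componentB are hypothetical mounted components.
//
//   batchedUpdates(function() {
//     componentA.setState({open: true});
//     componentB.setState({open: false});
//   });  // both re-render once, in mount order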
/**
* Array comparator for ReactComponents by mount ordering.
*
* @param {ReactComponent} c1 first component you're comparing
* @param {ReactComponent} c2 second component you're comparing
* @return {number} Return value usable by Array.prototype.sort().
*/
function mountOrderComparator(c1, c2) {
return c1._mountOrder - c2._mountOrder;
}
function runBatchedUpdates(transaction) {
var len = transaction.dirtyComponentsLength;
("production" !== "production" ? invariant(
len === dirtyComponents.length,
'Expected flush transaction\'s stored dirty-components length (%s) to ' +
'match dirty-components array length (%s).',
len,
dirtyComponents.length
) : invariant(len === dirtyComponents.length));
  // Since reconciling a component higher in the owner hierarchy usually (not
  // always -- see shouldComponentUpdate()) will reconcile its children too,
  // sort the array so that parents are reconciled before their children.
dirtyComponents.sort(mountOrderComparator);
for (var i = 0; i < len; i++) {
// If a component is unmounted before pending changes apply, it will still
// be here, but we assume that it has cleared its _pendingCallbacks and
// that performUpdateIfNecessary is a noop.
var component = dirtyComponents[i];
// If performUpdateIfNecessary happens to enqueue any new updates, we
// shouldn't execute the callbacks until the next render happens, so
// stash the callbacks first
var callbacks = component._pendingCallbacks;
component._pendingCallbacks = null;
ReactReconciler.performUpdateIfNecessary(
component,
transaction.reconcileTransaction
);
if (callbacks) {
for (var j = 0; j < callbacks.length; j++) {
transaction.callbackQueue.enqueue(
callbacks[j],
component.getPublicInstance()
);
}
}
}
}
var flushBatchedUpdates = function() {
// ReactUpdatesFlushTransaction's wrappers will clear the dirtyComponents
// array and perform any updates enqueued by mount-ready handlers (i.e.,
// componentDidUpdate) but we need to check here too in order to catch
// updates enqueued by setState callbacks and asap calls.
while (dirtyComponents.length || asapEnqueued) {
if (dirtyComponents.length) {
var transaction = ReactUpdatesFlushTransaction.getPooled();
transaction.perform(runBatchedUpdates, null, transaction);
ReactUpdatesFlushTransaction.release(transaction);
}
if (asapEnqueued) {
asapEnqueued = false;
var queue = asapCallbackQueue;
asapCallbackQueue = CallbackQueue.getPooled();
queue.notifyAll();
CallbackQueue.release(queue);
}
}
};
flushBatchedUpdates = ReactPerf.measure(
'ReactUpdates',
'flushBatchedUpdates',
flushBatchedUpdates
);
/**
* Mark a component as needing a rerender, adding an optional callback to a
* list of functions which will be executed once the rerender occurs.
*/
function enqueueUpdate(component) {
ensureInjected();
// Various parts of our code (such as ReactCompositeComponent's
// _renderValidatedComponent) assume that calls to render aren't nested;
// verify that that's the case. (This is called by each top-level update
// function, like setProps, setState, forceUpdate, etc.; creation and
// destruction of top-level components is guarded in ReactMount.)
("production" !== "production" ? warning(
ReactCurrentOwner.current == null,
'enqueueUpdate(): Render methods should be a pure function of props ' +
'and state; triggering nested component updates from render is not ' +
'allowed. If necessary, trigger nested updates in ' +
'componentDidUpdate.'
) : null);
if (!batchingStrategy.isBatchingUpdates) {
batchingStrategy.batchedUpdates(enqueueUpdate, component);
return;
}
dirtyComponents.push(component);
}
/**
* Enqueue a callback to be run at the end of the current batching cycle. Throws
* if no updates are currently being performed.
*/
function asap(callback, context) {
("production" !== "production" ? invariant(
batchingStrategy.isBatchingUpdates,
    'ReactUpdates.asap: Can\'t enqueue an asap callback in a context where ' +
    'updates are not being batched.'
) : invariant(batchingStrategy.isBatchingUpdates));
asapCallbackQueue.enqueue(callback, context);
asapEnqueued = true;
}
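// Illustrative sketch (not part of the React source; `component` and
// `nextValue` are hypothetical): `asap` may only be called while a batch is
// in flight, e.g. inside an event handler or an explicit `batchedUpdates`
// call, and its callback runs once the current batch of updates flushes:
//
//   ReactUpdates.batchedUpdates(function() {
//     component.setState({value: nextValue});
//     ReactUpdates.asap(function() {
//       // runs after dirty components re-render, before the batch returns
//     });
//   });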
var ReactUpdatesInjection = {
injectReconcileTransaction: function(ReconcileTransaction) {
("production" !== "production" ? invariant(
ReconcileTransaction,
'ReactUpdates: must provide a reconcile transaction class'
) : invariant(ReconcileTransaction));
ReactUpdates.ReactReconcileTransaction = ReconcileTransaction;
},
injectBatchingStrategy: function(_batchingStrategy) {
("production" !== "production" ? invariant(
_batchingStrategy,
'ReactUpdates: must provide a batching strategy'
) : invariant(_batchingStrategy));
("production" !== "production" ? invariant(
typeof _batchingStrategy.batchedUpdates === 'function',
'ReactUpdates: must provide a batchedUpdates() function'
) : invariant(typeof _batchingStrategy.batchedUpdates === 'function'));
("production" !== "production" ? invariant(
typeof _batchingStrategy.isBatchingUpdates === 'boolean',
'ReactUpdates: must provide an isBatchingUpdates boolean attribute'
) : invariant(typeof _batchingStrategy.isBatchingUpdates === 'boolean'));
batchingStrategy = _batchingStrategy;
}
};
var ReactUpdates = {
/**
* React references `ReactReconcileTransaction` using this property in order
* to allow dependency injection.
*
* @internal
*/
ReactReconcileTransaction: null,
batchedUpdates: batchedUpdates,
enqueueUpdate: enqueueUpdate,
flushBatchedUpdates: flushBatchedUpdates,
injection: ReactUpdatesInjection,
asap: asap
};
module.exports = ReactUpdates;
},{"./CallbackQueue":11,"./Object.assign":33,"./PooledClass":34,"./ReactCurrentOwner":49,"./ReactPerf":86,"./ReactReconciler":93,"./Transaction":121,"./invariant":155,"./warning":176}],105:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactWithAddons
*/
/**
* This module exists purely in the open source project, and is meant as a way
* to create a separate standalone build of React. This build has "addons", or
* functionality we've built and think might be useful but doesn't have a good
* place to live inside React core.
*/
'use strict';
var LinkedStateMixin = require("./LinkedStateMixin");
var React = require("./React");
var ReactComponentWithPureRenderMixin =
require("./ReactComponentWithPureRenderMixin");
var ReactCSSTransitionGroup = require("./ReactCSSTransitionGroup");
var ReactFragment = require("./ReactFragment");
var ReactTransitionGroup = require("./ReactTransitionGroup");
var ReactUpdates = require("./ReactUpdates");
var cx = require("./cx");
var cloneWithProps = require("./cloneWithProps");
var update = require("./update");
React.addons = {
CSSTransitionGroup: ReactCSSTransitionGroup,
LinkedStateMixin: LinkedStateMixin,
PureRenderMixin: ReactComponentWithPureRenderMixin,
TransitionGroup: ReactTransitionGroup,
batchedUpdates: ReactUpdates.batchedUpdates,
classSet: cx,
cloneWithProps: cloneWithProps,
createFragment: ReactFragment.create,
update: update
};
if ("production" !== "production") {
React.addons.Perf = require("./ReactDefaultPerf");
React.addons.TestUtils = require("./ReactTestUtils");
}
module.exports = React;
},{"./LinkedStateMixin":29,"./React":35,"./ReactCSSTransitionGroup":38,"./ReactComponentWithPureRenderMixin":46,"./ReactDefaultPerf":65,"./ReactFragment":73,"./ReactTestUtils":99,"./ReactTransitionGroup":102,"./ReactUpdates":104,"./cloneWithProps":127,"./cx":132,"./update":175}],106:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule SVGDOMPropertyConfig
*/
/*jslint bitwise: true*/
'use strict';
var DOMProperty = require("./DOMProperty");
var MUST_USE_ATTRIBUTE = DOMProperty.injection.MUST_USE_ATTRIBUTE;
var SVGDOMPropertyConfig = {
Properties: {
clipPath: MUST_USE_ATTRIBUTE,
cx: MUST_USE_ATTRIBUTE,
cy: MUST_USE_ATTRIBUTE,
d: MUST_USE_ATTRIBUTE,
dx: MUST_USE_ATTRIBUTE,
dy: MUST_USE_ATTRIBUTE,
fill: MUST_USE_ATTRIBUTE,
fillOpacity: MUST_USE_ATTRIBUTE,
fontFamily: MUST_USE_ATTRIBUTE,
fontSize: MUST_USE_ATTRIBUTE,
fx: MUST_USE_ATTRIBUTE,
fy: MUST_USE_ATTRIBUTE,
gradientTransform: MUST_USE_ATTRIBUTE,
gradientUnits: MUST_USE_ATTRIBUTE,
markerEnd: MUST_USE_ATTRIBUTE,
markerMid: MUST_USE_ATTRIBUTE,
markerStart: MUST_USE_ATTRIBUTE,
offset: MUST_USE_ATTRIBUTE,
opacity: MUST_USE_ATTRIBUTE,
patternContentUnits: MUST_USE_ATTRIBUTE,
patternUnits: MUST_USE_ATTRIBUTE,
points: MUST_USE_ATTRIBUTE,
preserveAspectRatio: MUST_USE_ATTRIBUTE,
r: MUST_USE_ATTRIBUTE,
rx: MUST_USE_ATTRIBUTE,
ry: MUST_USE_ATTRIBUTE,
spreadMethod: MUST_USE_ATTRIBUTE,
stopColor: MUST_USE_ATTRIBUTE,
stopOpacity: MUST_USE_ATTRIBUTE,
stroke: MUST_USE_ATTRIBUTE,
strokeDasharray: MUST_USE_ATTRIBUTE,
strokeLinecap: MUST_USE_ATTRIBUTE,
strokeOpacity: MUST_USE_ATTRIBUTE,
strokeWidth: MUST_USE_ATTRIBUTE,
textAnchor: MUST_USE_ATTRIBUTE,
transform: MUST_USE_ATTRIBUTE,
version: MUST_USE_ATTRIBUTE,
viewBox: MUST_USE_ATTRIBUTE,
x1: MUST_USE_ATTRIBUTE,
x2: MUST_USE_ATTRIBUTE,
x: MUST_USE_ATTRIBUTE,
y1: MUST_USE_ATTRIBUTE,
y2: MUST_USE_ATTRIBUTE,
y: MUST_USE_ATTRIBUTE
},
DOMAttributeNames: {
clipPath: 'clip-path',
fillOpacity: 'fill-opacity',
fontFamily: 'font-family',
fontSize: 'font-size',
gradientTransform: 'gradientTransform',
gradientUnits: 'gradientUnits',
markerEnd: 'marker-end',
markerMid: 'marker-mid',
markerStart: 'marker-start',
patternContentUnits: 'patternContentUnits',
patternUnits: 'patternUnits',
preserveAspectRatio: 'preserveAspectRatio',
spreadMethod: 'spreadMethod',
stopColor: 'stop-color',
stopOpacity: 'stop-opacity',
strokeDasharray: 'stroke-dasharray',
strokeLinecap: 'stroke-linecap',
strokeOpacity: 'stroke-opacity',
strokeWidth: 'stroke-width',
textAnchor: 'text-anchor',
viewBox: 'viewBox'
}
};
module.exports = SVGDOMPropertyConfig;
},{"./DOMProperty":15}],107:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule SelectEventPlugin
*/
'use strict';
var EventConstants = require("./EventConstants");
var EventPropagators = require("./EventPropagators");
var ReactInputSelection = require("./ReactInputSelection");
var SyntheticEvent = require("./SyntheticEvent");
var getActiveElement = require("./getActiveElement");
var isTextInputElement = require("./isTextInputElement");
var keyOf = require("./keyOf");
var shallowEqual = require("./shallowEqual");
var topLevelTypes = EventConstants.topLevelTypes;
var eventTypes = {
select: {
phasedRegistrationNames: {
bubbled: keyOf({onSelect: null}),
captured: keyOf({onSelectCapture: null})
},
dependencies: [
topLevelTypes.topBlur,
topLevelTypes.topContextMenu,
topLevelTypes.topFocus,
topLevelTypes.topKeyDown,
topLevelTypes.topMouseDown,
topLevelTypes.topMouseUp,
topLevelTypes.topSelectionChange
]
}
};
var activeElement = null;
var activeElementID = null;
var lastSelection = null;
var mouseDown = false;
/**
* Get an object which is a unique representation of the current selection.
*
* The return value will not be consistent across nodes or browsers, but
* two identical selections on the same node will return identical objects.
*
* @param {DOMElement} node
 * @return {object}
*/
function getSelection(node) {
if ('selectionStart' in node &&
ReactInputSelection.hasSelectionCapabilities(node)) {
return {
start: node.selectionStart,
end: node.selectionEnd
};
} else if (window.getSelection) {
var selection = window.getSelection();
return {
anchorNode: selection.anchorNode,
anchorOffset: selection.anchorOffset,
focusNode: selection.focusNode,
focusOffset: selection.focusOffset
};
} else if (document.selection) {
var range = document.selection.createRange();
return {
parentElement: range.parentElement(),
text: range.text,
top: range.boundingTop,
left: range.boundingLeft
};
}
}
/**
* Poll selection to see whether it's changed.
*
* @param {object} nativeEvent
* @return {?SyntheticEvent}
*/
function constructSelectEvent(nativeEvent) {
// Ensure we have the right element, and that the user is not dragging a
  // selection (this matches native `select` event behavior). In HTML5, select
  // fires only on input and textarea, so if there's no focused element we
  // won't dispatch.
if (mouseDown ||
activeElement == null ||
activeElement !== getActiveElement()) {
return null;
}
// Only fire when selection has actually changed.
var currentSelection = getSelection(activeElement);
if (!lastSelection || !shallowEqual(lastSelection, currentSelection)) {
lastSelection = currentSelection;
var syntheticEvent = SyntheticEvent.getPooled(
eventTypes.select,
activeElementID,
nativeEvent
);
syntheticEvent.type = 'select';
syntheticEvent.target = activeElement;
EventPropagators.accumulateTwoPhaseDispatches(syntheticEvent);
return syntheticEvent;
}
}
/**
* This plugin creates an `onSelect` event that normalizes select events
* across form elements.
*
* Supported elements are:
* - input (see `isTextInputElement`)
* - textarea
* - contentEditable
*
* This differs from native browser implementations in the following ways:
* - Fires on contentEditable fields as well as inputs.
* - Fires for collapsed selection.
* - Fires after user input.
*/
var SelectEventPlugin = {
eventTypes: eventTypes,
/**
* @param {string} topLevelType Record from `EventConstants`.
* @param {DOMEventTarget} topLevelTarget The listening component root node.
* @param {string} topLevelTargetID ID of `topLevelTarget`.
* @param {object} nativeEvent Native browser event.
* @return {*} An accumulation of synthetic events.
* @see {EventPluginHub.extractEvents}
*/
extractEvents: function(
topLevelType,
topLevelTarget,
topLevelTargetID,
nativeEvent) {
switch (topLevelType) {
// Track the input node that has focus.
case topLevelTypes.topFocus:
if (isTextInputElement(topLevelTarget) ||
topLevelTarget.contentEditable === 'true') {
activeElement = topLevelTarget;
activeElementID = topLevelTargetID;
lastSelection = null;
}
break;
case topLevelTypes.topBlur:
activeElement = null;
activeElementID = null;
lastSelection = null;
break;
// Don't fire the event while the user is dragging. This matches the
// semantics of the native select event.
case topLevelTypes.topMouseDown:
mouseDown = true;
break;
case topLevelTypes.topContextMenu:
case topLevelTypes.topMouseUp:
mouseDown = false;
return constructSelectEvent(nativeEvent);
      // Chrome and IE fire a non-standard event when the selection is changed
      // (and sometimes when it hasn't).
// Firefox doesn't support selectionchange, so check selection status
// after each key entry. The selection changes after keydown and before
// keyup, but we check on keydown as well in the case of holding down a
// key, when multiple keydown events are fired but only one keyup is.
case topLevelTypes.topSelectionChange:
case topLevelTypes.topKeyDown:
case topLevelTypes.topKeyUp:
return constructSelectEvent(nativeEvent);
}
}
};
module.exports = SelectEventPlugin;
},{"./EventConstants":20,"./EventPropagators":25,"./ReactInputSelection":75,"./SyntheticEvent":113,"./getActiveElement":141,"./isTextInputElement":158,"./keyOf":162,"./shallowEqual":171}],108:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ServerReactRootIndex
* @typechecks
*/
'use strict';
/**
 * Size of the reactRoot ID space. We generate random numbers for React root
 * IDs, and if there's a collision the event and DOM update systems will
 * get confused. In the future we need a way to generate GUIDs, but for
 * now this will work on a smaller scale.
*/
var GLOBAL_MOUNT_POINT_MAX = Math.pow(2, 53);
var ServerReactRootIndex = {
createReactRootIndex: function() {
return Math.ceil(Math.random() * GLOBAL_MOUNT_POINT_MAX);
}
};
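// Back-of-envelope estimate (not part of the source): by the birthday bound,
// the probability of any collision among n random root IDs is roughly
// n^2 / (2 * 2^53); even for n = 100,000 roots that is about 5.5e-7, which
// is why random IDs "work on a smaller scale".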
module.exports = ServerReactRootIndex;
},{}],109:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule SimpleEventPlugin
*/
'use strict';
var EventConstants = require("./EventConstants");
var EventPluginUtils = require("./EventPluginUtils");
var EventPropagators = require("./EventPropagators");
var SyntheticClipboardEvent = require("./SyntheticClipboardEvent");
var SyntheticEvent = require("./SyntheticEvent");
var SyntheticFocusEvent = require("./SyntheticFocusEvent");
var SyntheticKeyboardEvent = require("./SyntheticKeyboardEvent");
var SyntheticMouseEvent = require("./SyntheticMouseEvent");
var SyntheticDragEvent = require("./SyntheticDragEvent");
var SyntheticTouchEvent = require("./SyntheticTouchEvent");
var SyntheticUIEvent = require("./SyntheticUIEvent");
var SyntheticWheelEvent = require("./SyntheticWheelEvent");
var getEventCharCode = require("./getEventCharCode");
var invariant = require("./invariant");
var keyOf = require("./keyOf");
var warning = require("./warning");
var topLevelTypes = EventConstants.topLevelTypes;
var eventTypes = {
blur: {
phasedRegistrationNames: {
bubbled: keyOf({onBlur: true}),
captured: keyOf({onBlurCapture: true})
}
},
click: {
phasedRegistrationNames: {
bubbled: keyOf({onClick: true}),
captured: keyOf({onClickCapture: true})
}
},
contextMenu: {
phasedRegistrationNames: {
bubbled: keyOf({onContextMenu: true}),
captured: keyOf({onContextMenuCapture: true})
}
},
copy: {
phasedRegistrationNames: {
bubbled: keyOf({onCopy: true}),
captured: keyOf({onCopyCapture: true})
}
},
cut: {
phasedRegistrationNames: {
bubbled: keyOf({onCut: true}),
captured: keyOf({onCutCapture: true})
}
},
doubleClick: {
phasedRegistrationNames: {
bubbled: keyOf({onDoubleClick: true}),
captured: keyOf({onDoubleClickCapture: true})
}
},
drag: {
phasedRegistrationNames: {
bubbled: keyOf({onDrag: true}),
captured: keyOf({onDragCapture: true})
}
},
dragEnd: {
phasedRegistrationNames: {
bubbled: keyOf({onDragEnd: true}),
captured: keyOf({onDragEndCapture: true})
}
},
dragEnter: {
phasedRegistrationNames: {
bubbled: keyOf({onDragEnter: true}),
captured: keyOf({onDragEnterCapture: true})
}
},
dragExit: {
phasedRegistrationNames: {
bubbled: keyOf({onDragExit: true}),
captured: keyOf({onDragExitCapture: true})
}
},
dragLeave: {
phasedRegistrationNames: {
bubbled: keyOf({onDragLeave: true}),
captured: keyOf({onDragLeaveCapture: true})
}
},
dragOver: {
phasedRegistrationNames: {
bubbled: keyOf({onDragOver: true}),
captured: keyOf({onDragOverCapture: true})
}
},
dragStart: {
phasedRegistrationNames: {
bubbled: keyOf({onDragStart: true}),
captured: keyOf({onDragStartCapture: true})
}
},
drop: {
phasedRegistrationNames: {
bubbled: keyOf({onDrop: true}),
captured: keyOf({onDropCapture: true})
}
},
focus: {
phasedRegistrationNames: {
bubbled: keyOf({onFocus: true}),
captured: keyOf({onFocusCapture: true})
}
},
input: {
phasedRegistrationNames: {
bubbled: keyOf({onInput: true}),
captured: keyOf({onInputCapture: true})
}
},
keyDown: {
phasedRegistrationNames: {
bubbled: keyOf({onKeyDown: true}),
captured: keyOf({onKeyDownCapture: true})
}
},
keyPress: {
phasedRegistrationNames: {
bubbled: keyOf({onKeyPress: true}),
captured: keyOf({onKeyPressCapture: true})
}
},
keyUp: {
phasedRegistrationNames: {
bubbled: keyOf({onKeyUp: true}),
captured: keyOf({onKeyUpCapture: true})
}
},
load: {
phasedRegistrationNames: {
bubbled: keyOf({onLoad: true}),
captured: keyOf({onLoadCapture: true})
}
},
error: {
phasedRegistrationNames: {
bubbled: keyOf({onError: true}),
captured: keyOf({onErrorCapture: true})
}
},
// Note: We do not allow listening to mouseOver events. Instead, use the
// onMouseEnter/onMouseLeave created by `EnterLeaveEventPlugin`.
mouseDown: {
phasedRegistrationNames: {
bubbled: keyOf({onMouseDown: true}),
captured: keyOf({onMouseDownCapture: true})
}
},
mouseMove: {
phasedRegistrationNames: {
bubbled: keyOf({onMouseMove: true}),
captured: keyOf({onMouseMoveCapture: true})
}
},
mouseOut: {
phasedRegistrationNames: {
bubbled: keyOf({onMouseOut: true}),
captured: keyOf({onMouseOutCapture: true})
}
},
mouseOver: {
phasedRegistrationNames: {
bubbled: keyOf({onMouseOver: true}),
captured: keyOf({onMouseOverCapture: true})
}
},
mouseUp: {
phasedRegistrationNames: {
bubbled: keyOf({onMouseUp: true}),
captured: keyOf({onMouseUpCapture: true})
}
},
paste: {
phasedRegistrationNames: {
bubbled: keyOf({onPaste: true}),
captured: keyOf({onPasteCapture: true})
}
},
reset: {
phasedRegistrationNames: {
bubbled: keyOf({onReset: true}),
captured: keyOf({onResetCapture: true})
}
},
scroll: {
phasedRegistrationNames: {
bubbled: keyOf({onScroll: true}),
captured: keyOf({onScrollCapture: true})
}
},
submit: {
phasedRegistrationNames: {
bubbled: keyOf({onSubmit: true}),
captured: keyOf({onSubmitCapture: true})
}
},
touchCancel: {
phasedRegistrationNames: {
bubbled: keyOf({onTouchCancel: true}),
captured: keyOf({onTouchCancelCapture: true})
}
},
touchEnd: {
phasedRegistrationNames: {
bubbled: keyOf({onTouchEnd: true}),
captured: keyOf({onTouchEndCapture: true})
}
},
touchMove: {
phasedRegistrationNames: {
bubbled: keyOf({onTouchMove: true}),
captured: keyOf({onTouchMoveCapture: true})
}
},
touchStart: {
phasedRegistrationNames: {
bubbled: keyOf({onTouchStart: true}),
captured: keyOf({onTouchStartCapture: true})
}
},
wheel: {
phasedRegistrationNames: {
bubbled: keyOf({onWheel: true}),
captured: keyOf({onWheelCapture: true})
}
}
};
var topLevelEventsToDispatchConfig = {
topBlur: eventTypes.blur,
topClick: eventTypes.click,
topContextMenu: eventTypes.contextMenu,
topCopy: eventTypes.copy,
topCut: eventTypes.cut,
topDoubleClick: eventTypes.doubleClick,
topDrag: eventTypes.drag,
topDragEnd: eventTypes.dragEnd,
topDragEnter: eventTypes.dragEnter,
topDragExit: eventTypes.dragExit,
topDragLeave: eventTypes.dragLeave,
topDragOver: eventTypes.dragOver,
topDragStart: eventTypes.dragStart,
topDrop: eventTypes.drop,
topError: eventTypes.error,
topFocus: eventTypes.focus,
topInput: eventTypes.input,
topKeyDown: eventTypes.keyDown,
topKeyPress: eventTypes.keyPress,
topKeyUp: eventTypes.keyUp,
topLoad: eventTypes.load,
topMouseDown: eventTypes.mouseDown,
topMouseMove: eventTypes.mouseMove,
topMouseOut: eventTypes.mouseOut,
topMouseOver: eventTypes.mouseOver,
topMouseUp: eventTypes.mouseUp,
topPaste: eventTypes.paste,
topReset: eventTypes.reset,
topScroll: eventTypes.scroll,
topSubmit: eventTypes.submit,
topTouchCancel: eventTypes.touchCancel,
topTouchEnd: eventTypes.touchEnd,
topTouchMove: eventTypes.touchMove,
topTouchStart: eventTypes.touchStart,
topWheel: eventTypes.wheel
};
for (var type in topLevelEventsToDispatchConfig) {
topLevelEventsToDispatchConfig[type].dependencies = [type];
}
var SimpleEventPlugin = {
eventTypes: eventTypes,
/**
   * Same as the default implementation, except it cancels the event when the
   * return value is false. This behavior will be disabled in a future release.
   *
   * @param {object} event Event to be dispatched.
   * @param {function} listener Application-level callback.
* @param {string} domID DOM ID to pass to the callback.
*/
executeDispatch: function(event, listener, domID) {
var returnValue = EventPluginUtils.executeDispatch(event, listener, domID);
("production" !== "production" ? warning(
typeof returnValue !== 'boolean',
'Returning `false` from an event handler is deprecated and will be ' +
'ignored in a future release. Instead, manually call ' +
'e.stopPropagation() or e.preventDefault(), as appropriate.'
) : null);
if (returnValue === false) {
event.stopPropagation();
event.preventDefault();
}
},
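  // Illustrative sketch of the deprecated pattern guarded above (not part of
  // the source):
  //
  //   <a href="#" onClick={function() { return false; }}>...</a>
  //
  // still cancels the event today, but the warning says to call
  // e.preventDefault() / e.stopPropagation() explicitly instead.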
/**
* @param {string} topLevelType Record from `EventConstants`.
* @param {DOMEventTarget} topLevelTarget The listening component root node.
* @param {string} topLevelTargetID ID of `topLevelTarget`.
* @param {object} nativeEvent Native browser event.
* @return {*} An accumulation of synthetic events.
* @see {EventPluginHub.extractEvents}
*/
extractEvents: function(
topLevelType,
topLevelTarget,
topLevelTargetID,
nativeEvent) {
var dispatchConfig = topLevelEventsToDispatchConfig[topLevelType];
if (!dispatchConfig) {
return null;
}
var EventConstructor;
switch (topLevelType) {
case topLevelTypes.topInput:
case topLevelTypes.topLoad:
case topLevelTypes.topError:
case topLevelTypes.topReset:
case topLevelTypes.topSubmit:
// HTML Events
// @see http://www.w3.org/TR/html5/index.html#events-0
EventConstructor = SyntheticEvent;
break;
case topLevelTypes.topKeyPress:
      // Firefox creates a keypress event for function keys too. This removes
      // the unwanted keypress events. Enter, however, is both printable and
      // non-printable. One would expect Tab to be as well (but it isn't).
if (getEventCharCode(nativeEvent) === 0) {
return null;
}
/* falls through */
case topLevelTypes.topKeyDown:
case topLevelTypes.topKeyUp:
EventConstructor = SyntheticKeyboardEvent;
break;
case topLevelTypes.topBlur:
case topLevelTypes.topFocus:
EventConstructor = SyntheticFocusEvent;
break;
case topLevelTypes.topClick:
// Firefox creates a click event on right mouse clicks. This removes the
// unwanted click events.
if (nativeEvent.button === 2) {
return null;
}
/* falls through */
case topLevelTypes.topContextMenu:
case topLevelTypes.topDoubleClick:
case topLevelTypes.topMouseDown:
case topLevelTypes.topMouseMove:
case topLevelTypes.topMouseOut:
case topLevelTypes.topMouseOver:
case topLevelTypes.topMouseUp:
EventConstructor = SyntheticMouseEvent;
break;
case topLevelTypes.topDrag:
case topLevelTypes.topDragEnd:
case topLevelTypes.topDragEnter:
case topLevelTypes.topDragExit:
case topLevelTypes.topDragLeave:
case topLevelTypes.topDragOver:
case topLevelTypes.topDragStart:
case topLevelTypes.topDrop:
EventConstructor = SyntheticDragEvent;
break;
case topLevelTypes.topTouchCancel:
case topLevelTypes.topTouchEnd:
case topLevelTypes.topTouchMove:
case topLevelTypes.topTouchStart:
EventConstructor = SyntheticTouchEvent;
break;
case topLevelTypes.topScroll:
EventConstructor = SyntheticUIEvent;
break;
case topLevelTypes.topWheel:
EventConstructor = SyntheticWheelEvent;
break;
case topLevelTypes.topCopy:
case topLevelTypes.topCut:
case topLevelTypes.topPaste:
EventConstructor = SyntheticClipboardEvent;
break;
}
("production" !== "production" ? invariant(
EventConstructor,
'SimpleEventPlugin: Unhandled event type, `%s`.',
topLevelType
) : invariant(EventConstructor));
var event = EventConstructor.getPooled(
dispatchConfig,
topLevelTargetID,
nativeEvent
);
EventPropagators.accumulateTwoPhaseDispatches(event);
return event;
}
};
module.exports = SimpleEventPlugin;
},{"./EventConstants":20,"./EventPluginUtils":24,"./EventPropagators":25,"./SyntheticClipboardEvent":110,"./SyntheticDragEvent":112,"./SyntheticEvent":113,"./SyntheticFocusEvent":114,"./SyntheticKeyboardEvent":116,"./SyntheticMouseEvent":117,"./SyntheticTouchEvent":118,"./SyntheticUIEvent":119,"./SyntheticWheelEvent":120,"./getEventCharCode":142,"./invariant":155,"./keyOf":162,"./warning":176}],110:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule SyntheticClipboardEvent
* @typechecks static-only
*/
'use strict';
var SyntheticEvent = require("./SyntheticEvent");
/**
* @interface Event
* @see http://www.w3.org/TR/clipboard-apis/
*/
var ClipboardEventInterface = {
clipboardData: function(event) {
return (
'clipboardData' in event ?
event.clipboardData :
window.clipboardData
);
}
};
/**
* @param {object} dispatchConfig Configuration used to dispatch this event.
* @param {string} dispatchMarker Marker identifying the event target.
* @param {object} nativeEvent Native browser event.
* @extends {SyntheticUIEvent}
*/
function SyntheticClipboardEvent(dispatchConfig, dispatchMarker, nativeEvent) {
SyntheticEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent);
}
SyntheticEvent.augmentClass(SyntheticClipboardEvent, ClipboardEventInterface);
module.exports = SyntheticClipboardEvent;
},{"./SyntheticEvent":113}],111:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule SyntheticCompositionEvent
* @typechecks static-only
*/
'use strict';
var SyntheticEvent = require("./SyntheticEvent");
/**
* @interface Event
* @see http://www.w3.org/TR/DOM-Level-3-Events/#events-compositionevents
*/
var CompositionEventInterface = {
data: null
};
/**
* @param {object} dispatchConfig Configuration used to dispatch this event.
* @param {string} dispatchMarker Marker identifying the event target.
* @param {object} nativeEvent Native browser event.
* @extends {SyntheticUIEvent}
*/
function SyntheticCompositionEvent(
dispatchConfig,
dispatchMarker,
nativeEvent) {
SyntheticEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent);
}
SyntheticEvent.augmentClass(
SyntheticCompositionEvent,
CompositionEventInterface
);
module.exports = SyntheticCompositionEvent;
},{"./SyntheticEvent":113}],112:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule SyntheticDragEvent
* @typechecks static-only
*/
'use strict';
var SyntheticMouseEvent = require("./SyntheticMouseEvent");
/**
* @interface DragEvent
* @see http://www.w3.org/TR/DOM-Level-3-Events/
*/
var DragEventInterface = {
dataTransfer: null
};
/**
* @param {object} dispatchConfig Configuration used to dispatch this event.
* @param {string} dispatchMarker Marker identifying the event target.
* @param {object} nativeEvent Native browser event.
* @extends {SyntheticUIEvent}
*/
function SyntheticDragEvent(dispatchConfig, dispatchMarker, nativeEvent) {
SyntheticMouseEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent);
}
SyntheticMouseEvent.augmentClass(SyntheticDragEvent, DragEventInterface);
module.exports = SyntheticDragEvent;
},{"./SyntheticMouseEvent":117}],113:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule SyntheticEvent
* @typechecks static-only
*/
'use strict';
var PooledClass = require("./PooledClass");
var assign = require("./Object.assign");
var emptyFunction = require("./emptyFunction");
var getEventTarget = require("./getEventTarget");
/**
* @interface Event
* @see http://www.w3.org/TR/DOM-Level-3-Events/
*/
var EventInterface = {
type: null,
target: getEventTarget,
// currentTarget is set when dispatching; no use in copying it here
currentTarget: emptyFunction.thatReturnsNull,
eventPhase: null,
bubbles: null,
cancelable: null,
timeStamp: function(event) {
return event.timeStamp || Date.now();
},
defaultPrevented: null,
isTrusted: null
};
/**
* Synthetic events are dispatched by event plugins, typically in response to a
* top-level event delegation handler.
*
* These systems should generally use pooling to reduce the frequency of garbage
* collection. The system should check `isPersistent` to determine whether the
* event should be released into the pool after being dispatched. Users that
* need a persisted event should invoke `persist`.
*
* Synthetic events (and subclasses) implement the DOM Level 3 Events API by
* normalizing browser quirks. Subclasses do not necessarily have to implement a
* DOM interface; custom application-specific events can also subclass this.
*
* @param {object} dispatchConfig Configuration used to dispatch this event.
* @param {string} dispatchMarker Marker identifying the event target.
* @param {object} nativeEvent Native browser event.
*/
function SyntheticEvent(dispatchConfig, dispatchMarker, nativeEvent) {
this.dispatchConfig = dispatchConfig;
this.dispatchMarker = dispatchMarker;
this.nativeEvent = nativeEvent;
var Interface = this.constructor.Interface;
for (var propName in Interface) {
if (!Interface.hasOwnProperty(propName)) {
continue;
}
var normalize = Interface[propName];
if (normalize) {
this[propName] = normalize(nativeEvent);
} else {
this[propName] = nativeEvent[propName];
}
}
var defaultPrevented = nativeEvent.defaultPrevented != null ?
nativeEvent.defaultPrevented :
nativeEvent.returnValue === false;
if (defaultPrevented) {
this.isDefaultPrevented = emptyFunction.thatReturnsTrue;
} else {
this.isDefaultPrevented = emptyFunction.thatReturnsFalse;
}
this.isPropagationStopped = emptyFunction.thatReturnsFalse;
}
assign(SyntheticEvent.prototype, {
preventDefault: function() {
this.defaultPrevented = true;
var event = this.nativeEvent;
if (event.preventDefault) {
event.preventDefault();
} else {
event.returnValue = false;
}
this.isDefaultPrevented = emptyFunction.thatReturnsTrue;
},
stopPropagation: function() {
var event = this.nativeEvent;
if (event.stopPropagation) {
event.stopPropagation();
} else {
event.cancelBubble = true;
}
this.isPropagationStopped = emptyFunction.thatReturnsTrue;
},
/**
* We release all dispatched `SyntheticEvent`s after each event loop, adding
* them back into the pool. This allows a way to hold onto a reference that
* won't be added back into the pool.
*/
persist: function() {
this.isPersistent = emptyFunction.thatReturnsTrue;
},
/**
* Checks if this event should be released back into the pool.
*
* @return {boolean} True if this should not be released, false otherwise.
*/
isPersistent: emptyFunction.thatReturnsFalse,
/**
* `PooledClass` looks for `destructor` on each instance it releases.
*/
destructor: function() {
var Interface = this.constructor.Interface;
for (var propName in Interface) {
this[propName] = null;
}
this.dispatchConfig = null;
this.dispatchMarker = null;
this.nativeEvent = null;
}
});
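// Illustrative sketch (not part of the source; `use` is hypothetical):
// because dispatched events are pooled and nulled out by `destructor`, a
// handler that reads the event asynchronously must persist it first:
//
//   function onClick(e) {
//     e.persist();  // opt this instance out of the pool
//     setTimeout(function() { use(e.type); }, 0);
//   }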
SyntheticEvent.Interface = EventInterface;
/**
* Helper to reduce boilerplate when creating subclasses.
*
* @param {function} Class
* @param {?object} Interface
*/
SyntheticEvent.augmentClass = function(Class, Interface) {
var Super = this;
var prototype = Object.create(Super.prototype);
assign(prototype, Class.prototype);
Class.prototype = prototype;
Class.prototype.constructor = Class;
Class.Interface = assign({}, Super.Interface, Interface);
Class.augmentClass = Super.augmentClass;
PooledClass.addPoolingTo(Class, PooledClass.threeArgumentPooler);
};
PooledClass.addPoolingTo(SyntheticEvent, PooledClass.threeArgumentPooler);
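// Illustrative sketch (not part of the source; SyntheticFooEvent is
// hypothetical): `augmentClass` is how every synthetic event subclass in
// this bundle is derived -- it chains prototypes, merges Interfaces, and
// makes the subclass poolable:
//
//   function SyntheticFooEvent(dispatchConfig, dispatchMarker, nativeEvent) {
//     SyntheticEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent);
//   }
//   SyntheticEvent.augmentClass(SyntheticFooEvent, {fooData: null});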
module.exports = SyntheticEvent;
},{"./Object.assign":33,"./PooledClass":34,"./emptyFunction":134,"./getEventTarget":145}],114:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule SyntheticFocusEvent
* @typechecks static-only
*/
'use strict';
var SyntheticUIEvent = require("./SyntheticUIEvent");
/**
* @interface FocusEvent
* @see http://www.w3.org/TR/DOM-Level-3-Events/
*/
var FocusEventInterface = {
relatedTarget: null
};
/**
* @param {object} dispatchConfig Configuration used to dispatch this event.
* @param {string} dispatchMarker Marker identifying the event target.
* @param {object} nativeEvent Native browser event.
* @extends {SyntheticUIEvent}
*/
function SyntheticFocusEvent(dispatchConfig, dispatchMarker, nativeEvent) {
SyntheticUIEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent);
}
SyntheticUIEvent.augmentClass(SyntheticFocusEvent, FocusEventInterface);
module.exports = SyntheticFocusEvent;
},{"./SyntheticUIEvent":119}],115:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule SyntheticInputEvent
* @typechecks static-only
*/
'use strict';
var SyntheticEvent = require("./SyntheticEvent");
/**
* @interface Event
* @see http://www.w3.org/TR/2013/WD-DOM-Level-3-Events-20131105
* /#events-inputevents
*/
var InputEventInterface = {
data: null
};
/**
* @param {object} dispatchConfig Configuration used to dispatch this event.
* @param {string} dispatchMarker Marker identifying the event target.
* @param {object} nativeEvent Native browser event.
* @extends {SyntheticUIEvent}
*/
function SyntheticInputEvent(
dispatchConfig,
dispatchMarker,
nativeEvent) {
SyntheticEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent);
}
SyntheticEvent.augmentClass(
SyntheticInputEvent,
InputEventInterface
);
module.exports = SyntheticInputEvent;
},{"./SyntheticEvent":113}],116:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule SyntheticKeyboardEvent
* @typechecks static-only
*/
'use strict';
var SyntheticUIEvent = require("./SyntheticUIEvent");
var getEventCharCode = require("./getEventCharCode");
var getEventKey = require("./getEventKey");
var getEventModifierState = require("./getEventModifierState");
/**
* @interface KeyboardEvent
* @see http://www.w3.org/TR/DOM-Level-3-Events/
*/
var KeyboardEventInterface = {
key: getEventKey,
location: null,
ctrlKey: null,
shiftKey: null,
altKey: null,
metaKey: null,
repeat: null,
locale: null,
getModifierState: getEventModifierState,
// Legacy Interface
charCode: function(event) {
// `charCode` is the result of a KeyPress event and represents the value of
// the actual printable character.
// KeyPress is deprecated, but its replacement is not yet final and not
// implemented in any major browser. Only KeyPress has charCode.
if (event.type === 'keypress') {
return getEventCharCode(event);
}
return 0;
},
keyCode: function(event) {
    // `keyCode` is the result of a KeyDown/Up event and represents the value
    // of the physical keyboard key.
    // The actual meaning of the value depends on the user's keyboard layout,
    // which cannot be detected. Assuming that it is a US keyboard layout
    // provides a surprisingly accurate mapping for US and European users.
    // Due to this, it is left to the user to implement at this time.
if (event.type === 'keydown' || event.type === 'keyup') {
return event.keyCode;
}
return 0;
},
which: function(event) {
// `which` is an alias for either `keyCode` or `charCode` depending on the
// type of the event.
if (event.type === 'keypress') {
return getEventCharCode(event);
}
if (event.type === 'keydown' || event.type === 'keyup') {
return event.keyCode;
}
return 0;
}
};
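// Worked example (not part of the source): pressing the "a" key on a US
// layout yields a keydown with keyCode 65, then a keypress with charCode 97;
// `which` above mirrors whichever of the two applies to the event type.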
/**
* @param {object} dispatchConfig Configuration used to dispatch this event.
* @param {string} dispatchMarker Marker identifying the event target.
* @param {object} nativeEvent Native browser event.
* @extends {SyntheticUIEvent}
*/
function SyntheticKeyboardEvent(dispatchConfig, dispatchMarker, nativeEvent) {
SyntheticUIEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent);
}
SyntheticUIEvent.augmentClass(SyntheticKeyboardEvent, KeyboardEventInterface);
module.exports = SyntheticKeyboardEvent;
},{"./SyntheticUIEvent":119,"./getEventCharCode":142,"./getEventKey":143,"./getEventModifierState":144}],117:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule SyntheticMouseEvent
* @typechecks static-only
*/
'use strict';
var SyntheticUIEvent = require("./SyntheticUIEvent");
var ViewportMetrics = require("./ViewportMetrics");
var getEventModifierState = require("./getEventModifierState");
/**
* @interface MouseEvent
* @see http://www.w3.org/TR/DOM-Level-3-Events/
*/
var MouseEventInterface = {
screenX: null,
screenY: null,
clientX: null,
clientY: null,
ctrlKey: null,
shiftKey: null,
altKey: null,
metaKey: null,
getModifierState: getEventModifierState,
button: function(event) {
// Webkit, Firefox, IE9+
// which: 1 2 3
// button: 0 1 2 (standard)
var button = event.button;
if ('which' in event) {
return button;
}
// IE<9
// which: undefined
// button: 0 0 0
// button: 1 4 2 (onmouseup)
return button === 2 ? 2 : button === 4 ? 1 : 0;
},
buttons: null,
  relatedTarget: function(event) {
    return event.relatedTarget || (
      event.fromElement === event.srcElement ?
        event.toElement :
        event.fromElement
    );
  },
// "Proprietary" Interface.
pageX: function(event) {
return 'pageX' in event ?
event.pageX :
event.clientX + ViewportMetrics.currentScrollLeft;
},
pageY: function(event) {
return 'pageY' in event ?
event.pageY :
event.clientY + ViewportMetrics.currentScrollTop;
}
};
/**
* @param {object} dispatchConfig Configuration used to dispatch this event.
* @param {string} dispatchMarker Marker identifying the event target.
* @param {object} nativeEvent Native browser event.
* @extends {SyntheticUIEvent}
*/
function SyntheticMouseEvent(dispatchConfig, dispatchMarker, nativeEvent) {
SyntheticUIEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent);
}
SyntheticUIEvent.augmentClass(SyntheticMouseEvent, MouseEventInterface);
module.exports = SyntheticMouseEvent;
},{"./SyntheticUIEvent":119,"./ViewportMetrics":122,"./getEventModifierState":144}],118:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule SyntheticTouchEvent
* @typechecks static-only
*/
'use strict';
var SyntheticUIEvent = require("./SyntheticUIEvent");
var getEventModifierState = require("./getEventModifierState");
/**
* @interface TouchEvent
* @see http://www.w3.org/TR/touch-events/
*/
var TouchEventInterface = {
touches: null,
targetTouches: null,
changedTouches: null,
altKey: null,
metaKey: null,
ctrlKey: null,
shiftKey: null,
getModifierState: getEventModifierState
};
/**
* @param {object} dispatchConfig Configuration used to dispatch this event.
* @param {string} dispatchMarker Marker identifying the event target.
* @param {object} nativeEvent Native browser event.
* @extends {SyntheticUIEvent}
*/
function SyntheticTouchEvent(dispatchConfig, dispatchMarker, nativeEvent) {
SyntheticUIEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent);
}
SyntheticUIEvent.augmentClass(SyntheticTouchEvent, TouchEventInterface);
module.exports = SyntheticTouchEvent;
},{"./SyntheticUIEvent":119,"./getEventModifierState":144}],119:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule SyntheticUIEvent
* @typechecks static-only
*/
'use strict';
var SyntheticEvent = require("./SyntheticEvent");
var getEventTarget = require("./getEventTarget");
/**
* @interface UIEvent
* @see http://www.w3.org/TR/DOM-Level-3-Events/
*/
var UIEventInterface = {
view: function(event) {
if (event.view) {
return event.view;
}
var target = getEventTarget(event);
if (target != null && target.window === target) {
// target is a window object
return target;
}
var doc = target.ownerDocument;
// TODO: Figure out why `ownerDocument` is sometimes undefined in IE8.
if (doc) {
return doc.defaultView || doc.parentWindow;
} else {
return window;
}
},
detail: function(event) {
return event.detail || 0;
}
};
/**
* @param {object} dispatchConfig Configuration used to dispatch this event.
* @param {string} dispatchMarker Marker identifying the event target.
* @param {object} nativeEvent Native browser event.
* @extends {SyntheticEvent}
*/
function SyntheticUIEvent(dispatchConfig, dispatchMarker, nativeEvent) {
SyntheticEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent);
}
SyntheticEvent.augmentClass(SyntheticUIEvent, UIEventInterface);
module.exports = SyntheticUIEvent;
},{"./SyntheticEvent":113,"./getEventTarget":145}],120:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule SyntheticWheelEvent
* @typechecks static-only
*/
'use strict';
var SyntheticMouseEvent = require("./SyntheticMouseEvent");
/**
* @interface WheelEvent
* @see http://www.w3.org/TR/DOM-Level-3-Events/
*/
var WheelEventInterface = {
deltaX: function(event) {
return (
'deltaX' in event ? event.deltaX :
// Fallback to `wheelDeltaX` for Webkit and normalize (right is positive).
'wheelDeltaX' in event ? -event.wheelDeltaX : 0
);
},
deltaY: function(event) {
return (
'deltaY' in event ? event.deltaY :
// Fallback to `wheelDeltaY` for Webkit and normalize (down is positive).
'wheelDeltaY' in event ? -event.wheelDeltaY :
// Fallback to `wheelDelta` for IE<9 and normalize (down is positive).
'wheelDelta' in event ? -event.wheelDelta : 0
);
},
deltaZ: null,
  // Browsers without "deltaMode" report a raw wheel delta where one notch
  // on the scroll wheel is always +/- 120, roughly equivalent to pixels.
  // A good approximation of DOM_DELTA_LINE (1) is 5% of viewport size or
  // ~40 pixels; for DOM_DELTA_SCREEN (2) it is 87.5% of viewport size.
deltaMode: null
};
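// Worked example (not part of the source): one downward notch in legacy
// WebKit reports wheelDeltaY === -120; the fallback above negates it to
// deltaY === 120, matching the standard convention that down is positive.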
/**
* @param {object} dispatchConfig Configuration used to dispatch this event.
* @param {string} dispatchMarker Marker identifying the event target.
* @param {object} nativeEvent Native browser event.
* @extends {SyntheticMouseEvent}
*/
function SyntheticWheelEvent(dispatchConfig, dispatchMarker, nativeEvent) {
SyntheticMouseEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent);
}
SyntheticMouseEvent.augmentClass(SyntheticWheelEvent, WheelEventInterface);
module.exports = SyntheticWheelEvent;
},{"./SyntheticMouseEvent":117}],121:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule Transaction
*/
'use strict';
var invariant = require("./invariant");
/**
 * `Transaction` creates a black box that is able to wrap any method such that
 * certain invariants are maintained before and after the method is invoked
 * (even if an exception is thrown while invoking the wrapped method). Whoever
 * instantiates a transaction can provide enforcers of the invariants at
 * creation time. The `Transaction` class itself will supply one additional
 * automatic invariant for you - the invariant that any transaction instance
 * should not be run while it is already being run. You would typically create
 * a single `Transaction` instance and reuse it multiple times, potentially
 * wrapping several different methods. Wrappers are extremely simple - they
 * only require implementing two methods.
*
* <pre>
* wrappers (injected at creation time)
* + +
* | |
* +-----------------|--------|--------------+
* | v | |
* | +---------------+ | |
* | +--| wrapper1 |---|----+ |
* | | +---------------+ v | |
* | | +-------------+ | |
* | | +----| wrapper2 |--------+ |
* | | | +-------------+ | | |
* | | | | | |
* | v v v v | wrapper
* | +---+ +---+ +---------+ +---+ +---+ | invariants
* perform(anyMethod) | | | | | | | | | | | | maintained
* +----------------->|-|---|-|---|-->|anyMethod|---|---|-|---|-|-------->
* | | | | | | | | | | | |
* | | | | | | | | | | | |
* | | | | | | | | | | | |
* | +---+ +---+ +---------+ +---+ +---+ |
* | initialize close |
* +-----------------------------------------+
* </pre>
*
* Use cases:
* - Preserving the input selection ranges before/after reconciliation.
* Restoring selection even in the event of an unexpected error.
* - Deactivating events while rearranging the DOM, preventing blurs/focuses,
* while guaranteeing that afterwards, the event system is reactivated.
* - Flushing a queue of collected DOM mutations to the main UI thread after a
* reconciliation takes place in a worker thread.
* - Invoking any collected `componentDidUpdate` callbacks after rendering new
* content.
* - (Future use case): Wrapping particular flushes of the `ReactWorker` queue
* to preserve the `scrollTop` (an automatic scroll aware DOM).
* - (Future use case): Layout calculations before and after DOM updates.
*
 * Transactional plugin API:
 * - A module that has an `initialize` method that returns any precomputation,
 *   and a `close` method that accepts that precomputation. `close` is invoked
 *   when the wrapped process is completed, or has failed.
 *
 * @param {Array<TransactionalWrapper>} transactionWrappers Wrapper modules
 * that implement `initialize` and `close`.
* @return {Transaction} Single transaction for reuse in thread.
*
* @class Transaction
*/
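// Illustrative sketch (not part of the source; SELECTION_RESTORE,
// saveSelection, restoreSelection, and MyTransaction are hypothetical, and
// `assign` stands for Object.assign): a wrapper pairs an `initialize` that
// returns a precomputation with a `close` that receives it, and a transaction
// class mixes in `Mixin` and supplies its wrappers:
//
//   var SELECTION_RESTORE = {
//     initialize: function() { return saveSelection(); },
//     close: function(saved) { restoreSelection(saved); }
//   };
//   function MyTransaction() { this.reinitializeTransaction(); }
//   assign(MyTransaction.prototype, Mixin, {
//     getTransactionWrappers: function() { return [SELECTION_RESTORE]; }
//   });
//   new MyTransaction().perform(function() { /* risky DOM work */ });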
var Mixin = {
/**
* Sets up this instance so that it is prepared for collecting metrics. Does
* so such that this setup method may be used on an instance that is already
* initialized, in a way that does not consume additional memory upon reuse.
* That can be useful if you decide to make your subclass of this mixin a
* "PooledClass".
*/
reinitializeTransaction: function() {
this.transactionWrappers = this.getTransactionWrappers();
if (!this.wrapperInitData) {
this.wrapperInitData = [];
} else {
this.wrapperInitData.length = 0;
}
this._isInTransaction = false;
},
_isInTransaction: false,
/**
* @abstract
* @return {Array<TransactionWrapper>} Array of transaction wrappers.
*/
getTransactionWrappers: null,
isInTransaction: function() {
return !!this._isInTransaction;
},
/**
* Executes the function within a safety window. Use this for the top level
* methods that result in large amounts of computation/mutations that would
* need to be safety checked.
*
* @param {function} method Member of scope to call.
* @param {Object} scope Scope to invoke from.
* @param {Object?=} args... Arguments to pass to the method (optional).
* Helps prevent need to bind in many cases.
* @return Return value from `method`.
*/
perform: function(method, scope, a, b, c, d, e, f) {
("production" !== "production" ? invariant(
!this.isInTransaction(),
'Transaction.perform(...): Cannot initialize a transaction when there ' +
'is already an outstanding transaction.'
) : invariant(!this.isInTransaction()));
var errorThrown;
var ret;
try {
this._isInTransaction = true;
// Catching errors makes debugging more difficult, so we start with
// errorThrown set to true before setting it to false after calling
// close -- if it's still set to true in the finally block, it means
// one of these calls threw.
errorThrown = true;
this.initializeAll(0);
ret = method.call(scope, a, b, c, d, e, f);
errorThrown = false;
} finally {
try {
if (errorThrown) {
// If `method` throws, prefer to show that stack trace over any thrown
// by invoking `closeAll`.
try {
this.closeAll(0);
} catch (err) {
}
} else {
// Since `method` didn't throw, we don't want to silence the exception
// here.
this.closeAll(0);
}
} finally {
this._isInTransaction = false;
}
}
return ret;
},
initializeAll: function(startIndex) {
var transactionWrappers = this.transactionWrappers;
for (var i = startIndex; i < transactionWrappers.length; i++) {
var wrapper = transactionWrappers[i];
try {
// Catching errors makes debugging more difficult, so we start with the
// OBSERVED_ERROR state before overwriting it with the real return value
// of initialize -- if it's still set to OBSERVED_ERROR in the finally
// block, it means wrapper.initialize threw.
this.wrapperInitData[i] = Transaction.OBSERVED_ERROR;
this.wrapperInitData[i] = wrapper.initialize ?
wrapper.initialize.call(this) :
null;
} finally {
if (this.wrapperInitData[i] === Transaction.OBSERVED_ERROR) {
// The initializer for wrapper i threw an error; initialize the
// remaining wrappers but silence any exceptions from them to ensure
// that the first error is the one to bubble up.
try {
this.initializeAll(i + 1);
} catch (err) {
}
}
}
}
},
/**
   * Invokes the `close` function of each of `this.transactionWrappers[i]`,
   * passing in the respective return value of that wrapper's `initialize`
   * (the `close` of any wrapper whose initializer failed will not be
   * invoked).
*/
closeAll: function(startIndex) {
("production" !== "production" ? invariant(
this.isInTransaction(),
'Transaction.closeAll(): Cannot close transaction when none are open.'
) : invariant(this.isInTransaction()));
var transactionWrappers = this.transactionWrappers;
for (var i = startIndex; i < transactionWrappers.length; i++) {
var wrapper = transactionWrappers[i];
var initData = this.wrapperInitData[i];
var errorThrown;
try {
// Catching errors makes debugging more difficult, so we start with
// errorThrown set to true before setting it to false after calling
// close -- if it's still set to true in the finally block, it means
// wrapper.close threw.
errorThrown = true;
if (initData !== Transaction.OBSERVED_ERROR && wrapper.close) {
wrapper.close.call(this, initData);
}
errorThrown = false;
} finally {
if (errorThrown) {
// The closer for wrapper i threw an error; close the remaining
// wrappers but silence any exceptions from them to ensure that the
// first error is the one to bubble up.
try {
this.closeAll(i + 1);
} catch (e) {
}
}
}
}
this.wrapperInitData.length = 0;
}
};
var Transaction = {
Mixin: Mixin,
/**
   * Token to look for to determine if an error occurred.
*/
OBSERVED_ERROR: {}
};
module.exports = Transaction;
},{"./invariant":155}],122:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ViewportMetrics
*/
'use strict';
var ViewportMetrics = {
currentScrollLeft: 0,
currentScrollTop: 0,
refreshScrollValues: function(scrollPosition) {
ViewportMetrics.currentScrollLeft = scrollPosition.x;
ViewportMetrics.currentScrollTop = scrollPosition.y;
}
};
module.exports = ViewportMetrics;
},{}],123:[function(require,module,exports){
/**
* Copyright 2014-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule accumulateInto
*/
'use strict';
var invariant = require("./invariant");
/**
*
* Accumulates items that must not be null or undefined into the first one. This
* is used to conserve memory by avoiding array allocations, and thus sacrifices
* API cleanness. Since `current` can be null before being passed in and not
* null after this function, make sure to assign it back to `current`:
*
* `a = accumulateInto(a, b);`
*
* This API should be sparingly used. Try `accumulate` for something cleaner.
*
* @return {*|array<*>} An accumulation of items.
*/
function accumulateInto(current, next) {
("production" !== "production" ? invariant(
next != null,
'accumulateInto(...): Accumulated items must not be null or undefined.'
) : invariant(next != null));
if (current == null) {
return next;
}
// Both are not empty. Warning: Never call x.concat(y) when you are not
// certain that x is an Array (x could be a string with concat method).
var currentIsArray = Array.isArray(current);
var nextIsArray = Array.isArray(next);
if (currentIsArray && nextIsArray) {
current.push.apply(current, next);
return current;
}
if (currentIsArray) {
current.push(next);
return current;
}
if (nextIsArray) {
// A bit too dangerous to mutate `next`.
return [current].concat(next);
}
return [current, next];
}
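// Illustrative usage (editor's example; `onClick` and `onScroll` are
// hypothetical listeners):
//
//   var listeners = null;
//   listeners = accumulateInto(listeners, onClick);   // -> onClick
//   listeners = accumulateInto(listeners, onScroll);  // -> [onClick, onScroll]
//
// Once `listeners` becomes an array, later calls push into it in place,
// which is why the result must always be assigned back.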
module.exports = accumulateInto;
},{"./invariant":155}],124:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule adler32
*/
/* jslint bitwise:true */
'use strict';
var MOD = 65521;
// This is a clean-room implementation of adler32 designed for detecting
// if markup is not what we expect it to be. It does not need to be
// cryptographically strong, only reasonably good at detecting if markup
// generated on the server is different than that on the client.
function adler32(data) {
var a = 1;
var b = 0;
for (var i = 0; i < data.length; i++) {
a = (a + data.charCodeAt(i)) % MOD;
b = (b + a) % MOD;
}
return a | (b << 16);
}
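// Illustrative usage (editor's example):
//
//   adler32('<div data-reactid=".0"></div>');  // -> a deterministic 32-bit int
//
// Identical markup strings always produce the same checksum, so server and
// client renders can be compared without retaining the full markup string.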
module.exports = adler32;
},{}],125:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule camelize
* @typechecks
*/
var _hyphenPattern = /-(.)/g;
/**
* Camelcases a hyphenated string, for example:
*
* > camelize('background-color')
* < "backgroundColor"
*
* @param {string} string
* @return {string}
*/
function camelize(string) {
return string.replace(_hyphenPattern, function(_, character) {
return character.toUpperCase();
});
}
module.exports = camelize;
},{}],126:[function(require,module,exports){
/**
* Copyright 2014-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule camelizeStyleName
* @typechecks
*/
"use strict";
var camelize = require("./camelize");
var msPattern = /^-ms-/;
/**
* Camelcases a hyphenated CSS property name, for example:
*
* > camelizeStyleName('background-color')
* < "backgroundColor"
* > camelizeStyleName('-moz-transition')
* < "MozTransition"
* > camelizeStyleName('-ms-transition')
* < "msTransition"
*
* As Andi Smith suggests
* (http://www.andismith.com/blog/2012/02/modernizr-prefixed/), an `-ms` prefix
* is converted to lowercase `ms`.
*
* @param {string} string
* @return {string}
*/
function camelizeStyleName(string) {
return camelize(string.replace(msPattern, 'ms-'));
}
module.exports = camelizeStyleName;
},{"./camelize":125}],127:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @typechecks static-only
* @providesModule cloneWithProps
*/
'use strict';
var ReactElement = require("./ReactElement");
var ReactPropTransferer = require("./ReactPropTransferer");
var keyOf = require("./keyOf");
var warning = require("./warning");
var CHILDREN_PROP = keyOf({children: null});
/**
* Sometimes you want to change the props of a child passed to you. Usually
* this is to add a CSS class.
*
* @param {ReactElement} child child element you'd like to clone
* @param {object} props props you'd like to modify. className and style will be
* merged automatically.
* @return {ReactElement} a clone of child with props merged in.
*/
function cloneWithProps(child, props) {
if ("production" !== "production") {
("production" !== "production" ? warning(
!child.ref,
'You are calling cloneWithProps() on a child with a ref. This is ' +
'dangerous because you\'re creating a new child which will not be ' +
'added as a ref to its parent.'
) : null);
}
var newProps = ReactPropTransferer.mergeProps(props, child.props);
// Use `child.props.children` if it is provided.
if (!newProps.hasOwnProperty(CHILDREN_PROP) &&
child.props.hasOwnProperty(CHILDREN_PROP)) {
newProps.children = child.props.children;
}
// The current API doesn't retain _owner and _context, which is why this
// doesn't use ReactElement.cloneAndReplaceProps.
return ReactElement.createElement(child.type, newProps);
}
module.exports = cloneWithProps;
},{"./ReactElement":67,"./ReactPropTransferer":87,"./keyOf":162,"./warning":176}],128:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule containsNode
* @typechecks
*/
var isTextNode = require("./isTextNode");
/*jslint bitwise:true */
/**
* Checks if a given DOM node contains or is another DOM node.
*
* @param {?DOMNode} outerNode Outer DOM node.
* @param {?DOMNode} innerNode Inner DOM node.
* @return {boolean} True if `outerNode` contains or is `innerNode`.
*/
function containsNode(outerNode, innerNode) {
if (!outerNode || !innerNode) {
return false;
} else if (outerNode === innerNode) {
return true;
} else if (isTextNode(outerNode)) {
return false;
} else if (isTextNode(innerNode)) {
return containsNode(outerNode, innerNode.parentNode);
} else if (outerNode.contains) {
return outerNode.contains(innerNode);
} else if (outerNode.compareDocumentPosition) {
return !!(outerNode.compareDocumentPosition(innerNode) & 16);
} else {
return false;
}
}
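// Illustrative usage (editor's example; `node` is any hypothetical DOM node):
//
//   containsNode(document.body, document.activeElement);  // usually true
//   containsNode(node, node);                             // true: a node "contains" itself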
module.exports = containsNode;
},{"./isTextNode":159}],129:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule createArrayFromMixed
* @typechecks
*/
var toArray = require("./toArray");
/**
* Perform a heuristic test to determine if an object is "array-like".
*
* A monk asked Joshu, a Zen master, "Has a dog Buddha nature?"
* Joshu replied: "Mu."
*
* This function determines if its argument has "array nature": it returns
* true if the argument is an actual array, an `arguments' object, or an
* HTMLCollection (e.g. node.childNodes or node.getElementsByTagName()).
*
 * It will return false for other array-like objects like FileList.
*
* @param {*} obj
* @return {boolean}
*/
function hasArrayNature(obj) {
return (
// not null/false
!!obj &&
// arrays are objects, NodeLists are functions in Safari
(typeof obj == 'object' || typeof obj == 'function') &&
// quacks like an array
('length' in obj) &&
// not window
!('setInterval' in obj) &&
// no DOM node should be considered an array-like
// a 'select' element has 'length' and 'item' properties on IE8
(typeof obj.nodeType != 'number') &&
    (
      // a real array
      Array.isArray(obj) ||
      // arguments
      ('callee' in obj) ||
      // HTMLCollection/NodeList
      ('item' in obj)
    )
);
}
/**
* Ensure that the argument is an array by wrapping it in an array if it is not.
* Creates a copy of the argument if it is already an array.
*
* This is mostly useful idiomatically:
*
* var createArrayFromMixed = require('createArrayFromMixed');
*
* function takesOneOrMoreThings(things) {
* things = createArrayFromMixed(things);
* ...
* }
*
* This allows you to treat `things' as an array, but accept scalars in the API.
*
* If you need to convert an array-like object, like `arguments`, into an array
* use toArray instead.
*
* @param {*} obj
* @return {array}
*/
function createArrayFromMixed(obj) {
if (!hasArrayNature(obj)) {
return [obj];
} else if (Array.isArray(obj)) {
return obj.slice();
} else {
return toArray(obj);
}
}
module.exports = createArrayFromMixed;
},{"./toArray":173}],130:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule createFullPageComponent
* @typechecks
*/
'use strict';
// Defeat circular references by requiring this directly.
var ReactClass = require("./ReactClass");
var ReactElement = require("./ReactElement");
var invariant = require("./invariant");
/**
* Create a component that will throw an exception when unmounted.
*
* Components like <html> <head> and <body> can't be removed or added
* easily in a cross-browser way, however it's valuable to be able to
* take advantage of React's reconciliation for styling and <title>
* management. So we just document it and throw in dangerous cases.
*
* @param {string} tag The tag to wrap
* @return {function} convenience constructor of new component
*/
function createFullPageComponent(tag) {
var elementFactory = ReactElement.createFactory(tag);
var FullPageComponent = ReactClass.createClass({
tagName: tag.toUpperCase(),
displayName: 'ReactFullPageComponent' + tag,
componentWillUnmount: function() {
("production" !== "production" ? invariant(
false,
'%s tried to unmount. Because of cross-browser quirks it is ' +
'impossible to unmount some top-level components (eg <html>, <head>, ' +
'and <body>) reliably and efficiently. To fix this, have a single ' +
'top-level component that never unmounts render these elements.',
this.constructor.displayName
) : invariant(false));
},
render: function() {
return elementFactory(this.props);
}
});
return FullPageComponent;
}
module.exports = createFullPageComponent;
},{"./ReactClass":42,"./ReactElement":67,"./invariant":155}],131:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule createNodesFromMarkup
* @typechecks
*/
/*jslint evil: true, sub: true */
var ExecutionEnvironment = require("./ExecutionEnvironment");
var createArrayFromMixed = require("./createArrayFromMixed");
var getMarkupWrap = require("./getMarkupWrap");
var invariant = require("./invariant");
/**
* Dummy container used to render all markup.
*/
var dummyNode =
ExecutionEnvironment.canUseDOM ? document.createElement('div') : null;
/**
* Pattern used by `getNodeName`.
*/
var nodeNamePattern = /^\s*<(\w+)/;
/**
* Extracts the `nodeName` of the first element in a string of markup.
*
* @param {string} markup String of markup.
* @return {?string} Node name of the supplied markup.
*/
function getNodeName(markup) {
var nodeNameMatch = markup.match(nodeNamePattern);
return nodeNameMatch && nodeNameMatch[1].toLowerCase();
}
/**
* Creates an array containing the nodes rendered from the supplied markup. The
* optionally supplied `handleScript` function will be invoked once for each
* <script> element that is rendered. If no `handleScript` function is supplied,
* an exception is thrown if any <script> elements are rendered.
*
* @param {string} markup A string of valid HTML markup.
* @param {?function} handleScript Invoked once for each rendered <script>.
* @return {array<DOMElement|DOMTextNode>} An array of rendered nodes.
*/
function createNodesFromMarkup(markup, handleScript) {
var node = dummyNode;
("production" !== "production" ? invariant(!!dummyNode, 'createNodesFromMarkup dummy not initialized') : invariant(!!dummyNode));
var nodeName = getNodeName(markup);
var wrap = nodeName && getMarkupWrap(nodeName);
if (wrap) {
node.innerHTML = wrap[1] + markup + wrap[2];
var wrapDepth = wrap[0];
while (wrapDepth--) {
node = node.lastChild;
}
} else {
node.innerHTML = markup;
}
var scripts = node.getElementsByTagName('script');
if (scripts.length) {
("production" !== "production" ? invariant(
handleScript,
'createNodesFromMarkup(...): Unexpected <script> element rendered.'
) : invariant(handleScript));
createArrayFromMixed(scripts).forEach(handleScript);
}
var nodes = createArrayFromMixed(node.childNodes);
while (node.lastChild) {
node.removeChild(node.lastChild);
}
return nodes;
}
module.exports = createNodesFromMarkup;
},{"./ExecutionEnvironment":26,"./createArrayFromMixed":129,"./getMarkupWrap":147,"./invariant":155}],132:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule cx
*/
/**
* This function is used to mark string literals representing CSS class names
* so that they can be transformed statically. This allows for modularization
* and minification of CSS class names.
*
* In static_upstream, this function is actually implemented, but it should
* eventually be replaced with something more descriptive, and the transform
* that is used in the main stack should be ported for use elsewhere.
*
* @param string|object className to modularize, or an object of key/values.
* In the object case, the values are conditions that
* determine if the className keys should be included.
* @param [string ...] Variable list of classNames in the string case.
* @return string Renderable space-separated CSS className.
*/
'use strict';
var warning = require("./warning");
var warned = false;
function cx(classNames) {
if ("production" !== "production") {
("production" !== "production" ? warning(
warned,
'React.addons.classSet will be deprecated in a future version. See ' +
'http://fb.me/react-addons-classset'
) : null);
warned = true;
}
if (typeof classNames == 'object') {
return Object.keys(classNames).filter(function(className) {
return classNames[className];
}).join(' ');
} else {
return Array.prototype.join.call(arguments, ' ');
}
}
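// Illustrative usage (editor's example; `isActive` is a hypothetical flag):
//
//   cx('button', 'button-active');                     // -> 'button button-active'
//   cx({'button': true, 'button-active': isActive});   // keeps only truthy keys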
module.exports = cx;
},{"./warning":176}],133:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule dangerousStyleValue
* @typechecks static-only
*/
'use strict';
var CSSProperty = require("./CSSProperty");
var isUnitlessNumber = CSSProperty.isUnitlessNumber;
/**
* Convert a value into the proper css writable value. The style name `name`
* should be logical (no hyphens), as specified
* in `CSSProperty.isUnitlessNumber`.
*
* @param {string} name CSS property name such as `topMargin`.
* @param {*} value CSS property value such as `10px`.
* @return {string} Normalized style value with dimensions applied.
*/
function dangerousStyleValue(name, value) {
// Note that we've removed escapeTextForBrowser() calls here since the
// whole string will be escaped when the attribute is injected into
// the markup. If you provide unsafe user data here they can inject
// arbitrary CSS which may be problematic (I couldn't repro this):
// https://www.owasp.org/index.php/XSS_Filter_Evasion_Cheat_Sheet
// http://www.thespanner.co.uk/2007/11/26/ultimate-xss-css-injection/
// This is not an XSS hole but instead a potential CSS injection issue
// which has lead to a greater discussion about how we're going to
// trust URLs moving forward. See #2115901
var isEmpty = value == null || typeof value === 'boolean' || value === '';
if (isEmpty) {
return '';
}
var isNonNumeric = isNaN(value);
if (isNonNumeric || value === 0 ||
isUnitlessNumber.hasOwnProperty(name) && isUnitlessNumber[name]) {
return '' + value; // cast to string
}
if (typeof value === 'string') {
value = value.trim();
}
return value + 'px';
}
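// Illustrative behavior (editor's examples):
//
//   dangerousStyleValue('width', 10);      // -> '10px'  (numbers get a px unit)
//   dangerousStyleValue('zIndex', 10);     // -> '10'    (unitless property)
//   dangerousStyleValue('width', '10em');  // -> '10em'  (non-numeric strings pass through)
//   dangerousStyleValue('width', null);    // -> ''      (empty values clear the style)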
module.exports = dangerousStyleValue;
},{"./CSSProperty":9}],134:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @providesModule emptyFunction
*/
function makeEmptyFunction(arg) {
return function() {
return arg;
};
}
/**
* This function accepts and discards inputs; it has no side effects. This is
* primarily useful idiomatically for overridable function endpoints which
* always need to be callable, since JS lacks a null-call idiom ala Cocoa.
*/
function emptyFunction() {}
emptyFunction.thatReturns = makeEmptyFunction;
emptyFunction.thatReturnsFalse = makeEmptyFunction(false);
emptyFunction.thatReturnsTrue = makeEmptyFunction(true);
emptyFunction.thatReturnsNull = makeEmptyFunction(null);
emptyFunction.thatReturnsThis = function() { return this; };
emptyFunction.thatReturnsArgument = function(arg) { return arg; };
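// Illustrative usage (editor's examples; `someNode` is hypothetical):
//
//   emptyFunction.thatReturnsTrue();        // -> true
//   emptyFunction.thatReturnsArgument(42);  // -> 42
//   someNode.onclick = emptyFunction;       // safe overridable default handler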
module.exports = emptyFunction;
},{}],135:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule emptyObject
*/
"use strict";
var emptyObject = {};
if ("production" !== "production") {
Object.freeze(emptyObject);
}
module.exports = emptyObject;
},{}],136:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule escapeTextContentForBrowser
*/
'use strict';
var ESCAPE_LOOKUP = {
  '&': '&amp;',
  '>': '&gt;',
  '<': '&lt;',
  '"': '&quot;',
  '\'': '&#x27;'
};
var ESCAPE_REGEX = /[&><"']/g;
function escaper(match) {
return ESCAPE_LOOKUP[match];
}
/**
* Escapes text to prevent scripting attacks.
*
* @param {*} text Text value to escape.
* @return {string} An escaped string.
*/
function escapeTextContentForBrowser(text) {
return ('' + text).replace(ESCAPE_REGEX, escaper);
}
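// Illustrative usage (editor's examples):
//
//   escapeTextContentForBrowser('<script>');  // -> '&lt;script&gt;'
//   escapeTextContentForBrowser(1 < 2);       // -> 'true' (non-strings are coerced)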
module.exports = escapeTextContentForBrowser;
},{}],137:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule findDOMNode
* @typechecks static-only
*/
'use strict';
var ReactCurrentOwner = require("./ReactCurrentOwner");
var ReactInstanceMap = require("./ReactInstanceMap");
var ReactMount = require("./ReactMount");
var invariant = require("./invariant");
var isNode = require("./isNode");
var warning = require("./warning");
/**
* Returns the DOM node rendered by this element.
*
* @param {ReactComponent|DOMElement} componentOrElement
* @return {DOMElement} The root node of this element.
*/
function findDOMNode(componentOrElement) {
if ("production" !== "production") {
var owner = ReactCurrentOwner.current;
if (owner !== null) {
("production" !== "production" ? warning(
owner._warnedAboutRefsInRender,
'%s is accessing getDOMNode or findDOMNode inside its render(). ' +
'render() should be a pure function of props and state. It should ' +
'never access something that requires stale data from the previous ' +
'render, such as refs. Move this logic to componentDidMount and ' +
'componentDidUpdate instead.',
owner.getName() || 'A component'
) : null);
owner._warnedAboutRefsInRender = true;
}
}
if (componentOrElement == null) {
return null;
}
if (isNode(componentOrElement)) {
return componentOrElement;
}
if (ReactInstanceMap.has(componentOrElement)) {
return ReactMount.getNodeFromInstance(componentOrElement);
}
("production" !== "production" ? invariant(
componentOrElement.render == null ||
typeof componentOrElement.render !== 'function',
'Component (with keys: %s) contains `render` method ' +
'but is not mounted in the DOM',
Object.keys(componentOrElement)
) : invariant(componentOrElement.render == null ||
typeof componentOrElement.render !== 'function'));
("production" !== "production" ? invariant(
false,
'Element appears to be neither ReactComponent nor DOMNode (keys: %s)',
Object.keys(componentOrElement)
) : invariant(false));
}
module.exports = findDOMNode;
},{"./ReactCurrentOwner":49,"./ReactInstanceMap":77,"./ReactMount":81,"./invariant":155,"./isNode":157,"./warning":176}],138:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule flattenChildren
*/
'use strict';
var traverseAllChildren = require("./traverseAllChildren");
var warning = require("./warning");
/**
* @param {function} traverseContext Context passed through traversal.
* @param {?ReactComponent} child React child component.
* @param {!string} name String name of key path to child.
*/
function flattenSingleChildIntoContext(traverseContext, child, name) {
// We found a component instance.
var result = traverseContext;
var keyUnique = !result.hasOwnProperty(name);
if ("production" !== "production") {
("production" !== "production" ? warning(
keyUnique,
'flattenChildren(...): Encountered two children with the same key, ' +
'`%s`. Child keys must be unique; when two children share a key, only ' +
'the first child will be used.',
name
) : null);
}
if (keyUnique && child != null) {
result[name] = child;
}
}
/**
* Flattens children that are typically specified as `props.children`. Any null
* children will not be included in the resulting object.
* @return {!object} flattened children keyed by name.
*/
function flattenChildren(children) {
if (children == null) {
return children;
}
var result = {};
traverseAllChildren(children, flattenSingleChildIntoContext, result);
return result;
}
module.exports = flattenChildren;
},{"./traverseAllChildren":174,"./warning":176}],139:[function(require,module,exports){
/**
* Copyright 2014-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule focusNode
*/
"use strict";
/**
* @param {DOMElement} node input/textarea to focus
*/
function focusNode(node) {
// IE8 can throw "Can't move focus to the control because it is invisible,
// not enabled, or of a type that does not accept the focus." for all kinds of
// reasons that are too expensive and fragile to test.
try {
node.focus();
} catch(e) {
}
}
module.exports = focusNode;
},{}],140:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule forEachAccumulated
*/
'use strict';
/**
* @param {array} an "accumulation" of items which is either an Array or
* a single item. Useful when paired with the `accumulate` module. This is a
* simple utility that allows us to reason about a collection of items, but
* handling the case when there is exactly one item (and we do not need to
* allocate an array).
*/
var forEachAccumulated = function(arr, cb, scope) {
if (Array.isArray(arr)) {
arr.forEach(cb, scope);
} else if (arr) {
cb.call(scope, arr);
}
};
module.exports = forEachAccumulated;
},{}],141:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule getActiveElement
* @typechecks
*/
/**
* Same as document.activeElement but wraps in a try-catch block. In IE it is
* not safe to call document.activeElement if there is nothing focused.
*
* The activeElement will be null only if the document body is not yet defined.
*/
function getActiveElement() /*?DOMElement*/ {
try {
return document.activeElement || document.body;
} catch (e) {
return document.body;
}
}
module.exports = getActiveElement;
},{}],142:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule getEventCharCode
* @typechecks static-only
*/
'use strict';
/**
* `charCode` represents the actual "character code" and is safe to use with
* `String.fromCharCode`. As such, only keys that correspond to printable
 * characters produce a valid `charCode`; the only exception to this is Enter.
* The Tab-key is considered non-printable and does not have a `charCode`,
* presumably because it does not produce a tab-character in browsers.
*
* @param {object} nativeEvent Native browser event.
* @return {string} Normalized `charCode` property.
*/
function getEventCharCode(nativeEvent) {
var charCode;
var keyCode = nativeEvent.keyCode;
if ('charCode' in nativeEvent) {
charCode = nativeEvent.charCode;
// FF does not set `charCode` for the Enter-key, check against `keyCode`.
if (charCode === 0 && keyCode === 13) {
charCode = 13;
}
} else {
// IE8 does not implement `charCode`, but `keyCode` has the correct value.
charCode = keyCode;
}
// Some non-printable keys are reported in `charCode`/`keyCode`, discard them.
// Must not discard the (non-)printable Enter-key.
if (charCode >= 32 || charCode === 13) {
return charCode;
}
return 0;
}
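// Illustrative behavior (editor's examples; plain objects stand in for
// native browser events):
//
//   getEventCharCode({charCode: 97, keyCode: 0});  // -> 97 ('a')
//   getEventCharCode({charCode: 0, keyCode: 13});  // -> 13 (Enter, Firefox quirk)
//   getEventCharCode({charCode: 9, keyCode: 9});   // -> 0  (Tab is discarded)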
module.exports = getEventCharCode;
},{}],143:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule getEventKey
* @typechecks static-only
*/
'use strict';
var getEventCharCode = require("./getEventCharCode");
/**
* Normalization of deprecated HTML5 `key` values
* @see https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent#Key_names
*/
var normalizeKey = {
'Esc': 'Escape',
'Spacebar': ' ',
'Left': 'ArrowLeft',
'Up': 'ArrowUp',
'Right': 'ArrowRight',
'Down': 'ArrowDown',
'Del': 'Delete',
'Win': 'OS',
'Menu': 'ContextMenu',
'Apps': 'ContextMenu',
'Scroll': 'ScrollLock',
'MozPrintableKey': 'Unidentified'
};
/**
* Translation from legacy `keyCode` to HTML5 `key`
* Only special keys supported, all others depend on keyboard layout or browser
* @see https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent#Key_names
*/
var translateToKey = {
8: 'Backspace',
9: 'Tab',
12: 'Clear',
13: 'Enter',
16: 'Shift',
17: 'Control',
18: 'Alt',
19: 'Pause',
20: 'CapsLock',
27: 'Escape',
32: ' ',
33: 'PageUp',
34: 'PageDown',
35: 'End',
36: 'Home',
37: 'ArrowLeft',
38: 'ArrowUp',
39: 'ArrowRight',
40: 'ArrowDown',
45: 'Insert',
46: 'Delete',
112: 'F1', 113: 'F2', 114: 'F3', 115: 'F4', 116: 'F5', 117: 'F6',
118: 'F7', 119: 'F8', 120: 'F9', 121: 'F10', 122: 'F11', 123: 'F12',
144: 'NumLock',
145: 'ScrollLock',
224: 'Meta'
};
/**
* @param {object} nativeEvent Native browser event.
* @return {string} Normalized `key` property.
*/
function getEventKey(nativeEvent) {
if (nativeEvent.key) {
// Normalize inconsistent values reported by browsers due to
// implementations of a working draft specification.
// FireFox implements `key` but returns `MozPrintableKey` for all
// printable characters (normalized to `Unidentified`), ignore it.
var key = normalizeKey[nativeEvent.key] || nativeEvent.key;
if (key !== 'Unidentified') {
return key;
}
}
// Browser does not implement `key`, polyfill as much of it as we can.
if (nativeEvent.type === 'keypress') {
var charCode = getEventCharCode(nativeEvent);
// The enter-key is technically both printable and non-printable and can
// thus be captured by `keypress`, no other non-printable key should.
return charCode === 13 ? 'Enter' : String.fromCharCode(charCode);
}
if (nativeEvent.type === 'keydown' || nativeEvent.type === 'keyup') {
// While user keyboard layout determines the actual meaning of each
// `keyCode` value, almost all function keys have a universal value.
return translateToKey[nativeEvent.keyCode] || 'Unidentified';
}
return '';
}
module.exports = getEventKey;
},{"./getEventCharCode":142}],144:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule getEventModifierState
* @typechecks static-only
*/
'use strict';
/**
* Translation from modifier key to the associated property in the event.
* @see http://www.w3.org/TR/DOM-Level-3-Events/#keys-Modifiers
*/
var modifierKeyToProp = {
'Alt': 'altKey',
'Control': 'ctrlKey',
'Meta': 'metaKey',
'Shift': 'shiftKey'
};
// IE8 does not implement getModifierState, so we simply map it to the
// modifier keys exposed by the event itself; Lock-keys are not supported.
// Currently, all major browsers except Chrome seem to support Lock-keys.
function modifierStateGetter(keyArg) {
/*jshint validthis:true */
var syntheticEvent = this;
var nativeEvent = syntheticEvent.nativeEvent;
if (nativeEvent.getModifierState) {
return nativeEvent.getModifierState(keyArg);
}
var keyProp = modifierKeyToProp[keyArg];
return keyProp ? !!nativeEvent[keyProp] : false;
}
function getEventModifierState(nativeEvent) {
return modifierStateGetter;
}
module.exports = getEventModifierState;
},{}],145:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule getEventTarget
* @typechecks static-only
*/
'use strict';
/**
* Gets the target node from a native browser event by accounting for
* inconsistencies in browser DOM APIs.
*
* @param {object} nativeEvent Native browser event.
* @return {DOMEventTarget} Target node.
*/
function getEventTarget(nativeEvent) {
var target = nativeEvent.target || nativeEvent.srcElement || window;
// Safari may fire events on text nodes (Node.TEXT_NODE is 3).
// @see http://www.quirksmode.org/js/events_properties.html
return target.nodeType === 3 ? target.parentNode : target;
}
module.exports = getEventTarget;
},{}],146:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule getIteratorFn
* @typechecks static-only
*/
'use strict';
/* global Symbol */
var ITERATOR_SYMBOL = typeof Symbol === 'function' && Symbol.iterator;
var FAUX_ITERATOR_SYMBOL = '@@iterator'; // Before Symbol spec.
/**
* Returns the iterator method function contained on the iterable object.
*
* Be sure to invoke the function with the iterable as context:
*
* var iteratorFn = getIteratorFn(myIterable);
* if (iteratorFn) {
* var iterator = iteratorFn.call(myIterable);
* ...
* }
*
* @param {?object} maybeIterable
* @return {?function}
*/
function getIteratorFn(maybeIterable) {
var iteratorFn = maybeIterable && (
(ITERATOR_SYMBOL && maybeIterable[ITERATOR_SYMBOL] || maybeIterable[FAUX_ITERATOR_SYMBOL])
);
if (typeof iteratorFn === 'function') {
return iteratorFn;
}
}
module.exports = getIteratorFn;
},{}],147:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule getMarkupWrap
*/
var ExecutionEnvironment = require("./ExecutionEnvironment");
var invariant = require("./invariant");
/**
* Dummy container used to detect which wraps are necessary.
*/
var dummyNode =
ExecutionEnvironment.canUseDOM ? document.createElement('div') : null;
/**
* Some browsers cannot use `innerHTML` to render certain elements standalone,
* so we wrap them, render the wrapped nodes, then extract the desired node.
*
* In IE8, certain elements cannot render alone, so wrap all elements ('*').
*/
var shouldWrap = {
// Force wrapping for SVG elements because if they get created inside a <div>,
// they will be initialized in the wrong namespace (and will not display).
'circle': true,
'clipPath': true,
'defs': true,
'ellipse': true,
'g': true,
'line': true,
'linearGradient': true,
'path': true,
'polygon': true,
'polyline': true,
'radialGradient': true,
'rect': true,
'stop': true,
'text': true
};
var selectWrap = [1, '<select multiple="true">', '</select>'];
var tableWrap = [1, '<table>', '</table>'];
var trWrap = [3, '<table><tbody><tr>', '</tr></tbody></table>'];
var svgWrap = [1, '<svg>', '</svg>'];
var markupWrap = {
'*': [1, '?<div>', '</div>'],
'area': [1, '<map>', '</map>'],
'col': [2, '<table><tbody></tbody><colgroup>', '</colgroup></table>'],
'legend': [1, '<fieldset>', '</fieldset>'],
'param': [1, '<object>', '</object>'],
'tr': [2, '<table><tbody>', '</tbody></table>'],
'optgroup': selectWrap,
'option': selectWrap,
'caption': tableWrap,
'colgroup': tableWrap,
'tbody': tableWrap,
'tfoot': tableWrap,
'thead': tableWrap,
'td': trWrap,
'th': trWrap,
'circle': svgWrap,
'clipPath': svgWrap,
'defs': svgWrap,
'ellipse': svgWrap,
'g': svgWrap,
'line': svgWrap,
'linearGradient': svgWrap,
'path': svgWrap,
'polygon': svgWrap,
'polyline': svgWrap,
'radialGradient': svgWrap,
'rect': svgWrap,
'stop': svgWrap,
'text': svgWrap
};
/**
* Gets the markup wrap configuration for the supplied `nodeName`.
*
* NOTE: This lazily detects which wraps are necessary for the current browser.
*
* @param {string} nodeName Lowercase `nodeName`.
* @return {?array} Markup wrap configuration, if applicable.
*/
function getMarkupWrap(nodeName) {
("production" !== "production" ? invariant(!!dummyNode, 'Markup wrapping node not initialized') : invariant(!!dummyNode));
if (!markupWrap.hasOwnProperty(nodeName)) {
nodeName = '*';
}
if (!shouldWrap.hasOwnProperty(nodeName)) {
if (nodeName === '*') {
dummyNode.innerHTML = '<link />';
} else {
dummyNode.innerHTML = '<' + nodeName + '></' + nodeName + '>';
}
shouldWrap[nodeName] = !dummyNode.firstChild;
}
return shouldWrap[nodeName] ? markupWrap[nodeName] : null;
}
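// Illustrative usage (editor's examples; results depend on the running browser):
//
//   getMarkupWrap('tr');   // -> [2, '<table><tbody>', '</tbody></table>'] where wrapping is needed
//   getMarkupWrap('div');  // -> null in browsers that render <div> standalone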
module.exports = getMarkupWrap;
},{"./ExecutionEnvironment":26,"./invariant":155}],148:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule getNodeForCharacterOffset
*/
'use strict';
/**
* Given any node return the first leaf node without children.
*
* @param {DOMElement|DOMTextNode} node
* @return {DOMElement|DOMTextNode}
*/
function getLeafNode(node) {
while (node && node.firstChild) {
node = node.firstChild;
}
return node;
}
/**
* Get the next sibling within a container. This will walk up the
* DOM if a node's siblings have been exhausted.
*
* @param {DOMElement|DOMTextNode} node
* @return {?DOMElement|DOMTextNode}
*/
function getSiblingNode(node) {
while (node) {
if (node.nextSibling) {
return node.nextSibling;
}
node = node.parentNode;
}
}
/**
* Get object describing the nodes which contain characters at offset.
*
* @param {DOMElement|DOMTextNode} root
* @param {number} offset
* @return {?object}
*/
function getNodeForCharacterOffset(root, offset) {
var node = getLeafNode(root);
var nodeStart = 0;
var nodeEnd = 0;
while (node) {
if (node.nodeType === 3) {
nodeEnd = nodeStart + node.textContent.length;
if (nodeStart <= offset && nodeEnd >= offset) {
return {
node: node,
offset: offset - nodeStart
};
}
nodeStart = nodeEnd;
}
node = getLeafNode(getSiblingNode(node));
}
}
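// Illustrative usage (editor's example): given a hypothetical element `p`
// rendered from <p>ab<b>cd</b></p>, offset 3 falls one character into the
// second text node:
//
//   getNodeForCharacterOffset(p, 3);  // -> {node: [text 'cd'], offset: 1}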
module.exports = getNodeForCharacterOffset;
},{}],149:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule getReactRootElementInContainer
*/
'use strict';
var DOC_NODE_TYPE = 9;
/**
* @param {DOMElement|DOMDocument} container DOM element that may contain
* a React component
* @return {?*} DOM element that may have the reactRoot ID, or null.
*/
function getReactRootElementInContainer(container) {
if (!container) {
return null;
}
if (container.nodeType === DOC_NODE_TYPE) {
return container.documentElement;
} else {
return container.firstChild;
}
}
module.exports = getReactRootElementInContainer;
},{}],150:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule getTextContentAccessor
*/
'use strict';
var ExecutionEnvironment = require("./ExecutionEnvironment");
var contentKey = null;
/**
* Gets the key used to access text content on a DOM node.
*
* @return {?string} Key used to access text content.
* @internal
*/
function getTextContentAccessor() {
if (!contentKey && ExecutionEnvironment.canUseDOM) {
// Prefer textContent to innerText because many browsers support both but
// SVG <text> elements don't support innerText even when <div> does.
contentKey = 'textContent' in document.documentElement ?
'textContent' :
'innerText';
}
return contentKey;
}
module.exports = getTextContentAccessor;
},{"./ExecutionEnvironment":26}],151:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule getUnboundedScrollPosition
* @typechecks
*/
"use strict";
/**
* Gets the scroll position of the supplied element or window.
*
* The return values are unbounded, unlike `getScrollPosition`. This means they
* may be negative or exceed the element boundaries (which is possible using
* inertial scrolling).
*
* @param {DOMWindow|DOMElement} scrollable
* @return {object} Map with `x` and `y` keys.
*/
function getUnboundedScrollPosition(scrollable) {
if (scrollable === window) {
return {
x: window.pageXOffset || document.documentElement.scrollLeft,
y: window.pageYOffset || document.documentElement.scrollTop
};
}
return {
x: scrollable.scrollLeft,
y: scrollable.scrollTop
};
}
module.exports = getUnboundedScrollPosition;
},{}],152:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule hyphenate
* @typechecks
*/
var _uppercasePattern = /([A-Z])/g;
/**
* Hyphenates a camelcased string, for example:
*
* > hyphenate('backgroundColor')
* < "background-color"
*
* For CSS style names, use `hyphenateStyleName` instead which works properly
* with all vendor prefixes, including `ms`.
*
* @param {string} string
* @return {string}
*/
function hyphenate(string) {
return string.replace(_uppercasePattern, '-$1').toLowerCase();
}
module.exports = hyphenate;
},{}],153:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule hyphenateStyleName
* @typechecks
*/
"use strict";
var hyphenate = require("./hyphenate");
var msPattern = /^ms-/;
/**
* Hyphenates a camelcased CSS property name, for example:
*
* > hyphenateStyleName('backgroundColor')
* < "background-color"
* > hyphenateStyleName('MozTransition')
* < "-moz-transition"
* > hyphenateStyleName('msTransition')
* < "-ms-transition"
*
* As Modernizr suggests (http://modernizr.com/docs/#prefixed), an `ms` prefix
* is converted to `-ms-`.
*
* @param {string} string
* @return {string}
*/
function hyphenateStyleName(string) {
return hyphenate(string).replace(msPattern, '-ms-');
}
module.exports = hyphenateStyleName;
},{"./hyphenate":152}],154:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule instantiateReactComponent
* @typechecks static-only
*/
'use strict';
var ReactCompositeComponent = require("./ReactCompositeComponent");
var ReactEmptyComponent = require("./ReactEmptyComponent");
var ReactNativeComponent = require("./ReactNativeComponent");
var assign = require("./Object.assign");
var invariant = require("./invariant");
var warning = require("./warning");
// To avoid a cyclic dependency, we create the final class in this module
var ReactCompositeComponentWrapper = function() { };
assign(
ReactCompositeComponentWrapper.prototype,
ReactCompositeComponent.Mixin,
{
_instantiateReactComponent: instantiateReactComponent
}
);
/**
* Check if the type reference is a known internal type. I.e. not a user
* provided composite type.
*
* @param {function} type
* @return {boolean} Returns true if this is a valid internal type.
*/
function isInternalComponentType(type) {
return (
typeof type === 'function' &&
typeof type.prototype !== 'undefined' &&
typeof type.prototype.mountComponent === 'function' &&
typeof type.prototype.receiveComponent === 'function'
);
}
/**
* Given a ReactNode, create an instance that will actually be mounted.
*
* @param {ReactNode} node
* @param {*} parentCompositeType The composite type that resolved this.
* @return {object} A new instance of the element's constructor.
* @protected
*/
function instantiateReactComponent(node, parentCompositeType) {
var instance;
if (node === null || node === false) {
node = ReactEmptyComponent.emptyElement;
}
if (typeof node === 'object') {
var element = node;
if ("production" !== "production") {
("production" !== "production" ? warning(
element && (typeof element.type === 'function' ||
typeof element.type === 'string'),
'Only functions or strings can be mounted as React components.'
) : null);
}
// Special case string values
if (parentCompositeType === element.type &&
typeof element.type === 'string') {
// Avoid recursion if the wrapper renders itself.
instance = ReactNativeComponent.createInternalComponent(element);
// All native components are currently wrapped in a composite so we're
// safe to assume that this is what we should instantiate.
} else if (isInternalComponentType(element.type)) {
// This is temporarily available for custom components that are not string
// represenations. I.e. ART. Once those are updated to use the string
// representation, we can drop this code path.
instance = new element.type(element);
} else {
instance = new ReactCompositeComponentWrapper();
}
} else if (typeof node === 'string' || typeof node === 'number') {
instance = ReactNativeComponent.createInstanceForText(node);
} else {
("production" !== "production" ? invariant(
false,
'Encountered invalid React node of type %s',
typeof node
) : invariant(false));
}
if ("production" !== "production") {
("production" !== "production" ? warning(
typeof instance.construct === 'function' &&
typeof instance.mountComponent === 'function' &&
typeof instance.receiveComponent === 'function' &&
typeof instance.unmountComponent === 'function',
'Only React Components can be mounted.'
) : null);
}
// Sets up the instance. This can probably just move into the constructor now.
instance.construct(node);
// These two fields are used by the DOM and ART diffing algorithms
// respectively. Instead of using expandos on components, we should be
// storing the state needed by the diffing algorithms elsewhere.
instance._mountIndex = 0;
instance._mountImage = null;
if ("production" !== "production") {
instance._isOwnerNecessary = false;
instance._warnedAboutRefsInRender = false;
}
// Internal instances should fully constructed at this point, so they should
// not get any new fields added to them at this point.
if ("production" !== "production") {
if (Object.preventExtensions) {
Object.preventExtensions(instance);
}
}
return instance;
}
module.exports = instantiateReactComponent;
},{"./Object.assign":33,"./ReactCompositeComponent":47,"./ReactEmptyComponent":69,"./ReactNativeComponent":84,"./invariant":155,"./warning":176}],155:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule invariant
*/
"use strict";
/**
* Use invariant() to assert state which your program assumes to be true.
*
* Provide sprintf-style format (only %s is supported) and arguments
* to provide information about what broke and what you were
* expecting.
*
* The invariant message will be stripped in production, but the invariant
* will remain to ensure logic does not differ in production.
*/
var invariant = function(condition, format, a, b, c, d, e, f) {
if ("production" !== "production") {
if (format === undefined) {
throw new Error('invariant requires an error message argument');
}
}
if (!condition) {
var error;
if (format === undefined) {
error = new Error(
'Minified exception occurred; use the non-minified dev environment ' +
'for the full error message and additional helpful warnings.'
);
} else {
var args = [a, b, c, d, e, f];
var argIndex = 0;
error = new Error(
'Invariant Violation: ' +
format.replace(/%s/g, function() { return args[argIndex++]; })
);
}
error.framesToPop = 1; // we don't care about invariant's own frame
throw error;
}
};
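// Illustrative usage (editor's example; `user` is a hypothetical value):
//
//   invariant(user != null, 'Expected a user, got %s', typeof user);
//   // throws: Invariant Violation: Expected a user, got undefined
//
// Note that call sites in this production bundle take the branch that omits
// the format string, so failures surface as the generic 'Minified exception'
// message instead.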
module.exports = invariant;
},{}],156:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule isEventSupported
*/
'use strict';
var ExecutionEnvironment = require("./ExecutionEnvironment");
var useHasFeature;
if (ExecutionEnvironment.canUseDOM) {
useHasFeature =
document.implementation &&
document.implementation.hasFeature &&
// always returns true in newer browsers as per the standard.
// @see http://dom.spec.whatwg.org/#dom-domimplementation-hasfeature
document.implementation.hasFeature('', '') !== true;
}
/**
* Checks if an event is supported in the current execution environment.
*
* NOTE: This will not work correctly for non-generic events such as `change`,
* `reset`, `load`, `error`, and `select`.
*
* Borrows from Modernizr.
*
* @param {string} eventNameSuffix Event name, e.g. "click".
* @param {?boolean} capture Check if the capture phase is supported.
* @return {boolean} True if the event is supported.
* @internal
* @license Modernizr 3.0.0pre (Custom Build) | MIT
*/
function isEventSupported(eventNameSuffix, capture) {
if (!ExecutionEnvironment.canUseDOM ||
capture && !('addEventListener' in document)) {
return false;
}
var eventName = 'on' + eventNameSuffix;
var isSupported = eventName in document;
if (!isSupported) {
var element = document.createElement('div');
element.setAttribute(eventName, 'return;');
isSupported = typeof element[eventName] === 'function';
}
if (!isSupported && useHasFeature && eventNameSuffix === 'wheel') {
// This is the only way to test support for the `wheel` event in IE9+.
isSupported = document.implementation.hasFeature('Events.wheel', '3.0');
}
return isSupported;
}
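// Illustrative usage (editor's examples; a browser environment is assumed):
//
//   isEventSupported('click');        // -> true in any DOM environment
//   isEventSupported('wheel');        // -> feature-detected; false in old IE
//   isEventSupported('focus', true);  // also requires capture-phase support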
module.exports = isEventSupported;
},{"./ExecutionEnvironment":26}],157:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule isNode
* @typechecks
*/
/**
* @param {*} object The object to check.
* @return {boolean} Whether or not the object is a DOM node.
*/
function isNode(object) {
  return !!(object && (
    typeof Node === 'function' ?
      object instanceof Node :
      typeof object === 'object' &&
      typeof object.nodeType === 'number' &&
      typeof object.nodeName === 'string'
  ));
}
module.exports = isNode;
},{}],158:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule isTextInputElement
*/
'use strict';
/**
* @see http://www.whatwg.org/specs/web-apps/current-work/multipage/the-input-element.html#input-type-attr-summary
*/
var supportedInputTypes = {
'color': true,
'date': true,
'datetime': true,
'datetime-local': true,
'email': true,
'month': true,
'number': true,
'password': true,
'range': true,
'search': true,
'tel': true,
'text': true,
'time': true,
'url': true,
'week': true
};
function isTextInputElement(elem) {
return elem && (
(elem.nodeName === 'INPUT' && supportedInputTypes[elem.type] || elem.nodeName === 'TEXTAREA')
);
}
module.exports = isTextInputElement;
},{}],159:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule isTextNode
* @typechecks
*/
var isNode = require("./isNode");
/**
* @param {*} object The object to check.
* @return {boolean} Whether or not the object is a DOM text node.
*/
function isTextNode(object) {
return isNode(object) && object.nodeType == 3;
}
module.exports = isTextNode;
},{"./isNode":157}],160:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule joinClasses
* @typechecks static-only
*/
'use strict';
/**
* Combines multiple className strings into one.
* http://jsperf.com/joinclasses-args-vs-array
*
* @param {...?string} classes
* @return {string}
*/
function joinClasses(className/*, ... */) {
if (!className) {
className = '';
}
var nextClass;
var argLength = arguments.length;
if (argLength > 1) {
for (var ii = 1; ii < argLength; ii++) {
nextClass = arguments[ii];
if (nextClass) {
className = (className ? className + ' ' : '') + nextClass;
}
}
}
return className;
}
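// Illustrative usage (editor's examples):
//
//   joinClasses('btn', null, 'btn-primary');  // -> 'btn btn-primary'
//   joinClasses(undefined);                   // -> ''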
module.exports = joinClasses;
},{}],161:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule keyMirror
* @typechecks static-only
*/
'use strict';
var invariant = require("./invariant");
/**
* Constructs an enumeration with keys equal to their value.
*
* For example:
*
* var COLORS = keyMirror({blue: null, red: null});
* var myColor = COLORS.blue;
* var isColorValid = !!COLORS[myColor];
*
* The last line could not be performed if the values of the generated enum were
* not equal to their keys.
*
* Input: {key1: val1, key2: val2}
* Output: {key1: key1, key2: key2}
*
* @param {object} obj
* @return {object}
*/
var keyMirror = function(obj) {
var ret = {};
var key;
("production" !== "production" ? invariant(
obj instanceof Object && !Array.isArray(obj),
'keyMirror(...): Argument must be an object.'
) : invariant(obj instanceof Object && !Array.isArray(obj)));
for (key in obj) {
if (!obj.hasOwnProperty(key)) {
continue;
}
ret[key] = key;
}
return ret;
};
module.exports = keyMirror;
},{"./invariant":155}],162:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule keyOf
*/
/**
 * Allows extraction of a minified key. Lets the build system minify keys
 * without losing the ability to dynamically use key strings as values
* themselves. Pass in an object with a single key/val pair and it will return
* you the string key of that single record. Suppose you want to grab the
* value for a key 'className' inside of an object. Key/val minification may
* have aliased that key to be 'xa12'. keyOf({className: null}) will return
* 'xa12' in that case. Resolve keys you want to use once at startup time, then
* reuse those resolutions.
*/
var keyOf = function(oneKeyObj) {
var key;
for (key in oneKeyObj) {
if (!oneKeyObj.hasOwnProperty(key)) {
continue;
}
return key;
}
return null;
};
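// Illustrative usage (editor's example):
//
//   var CHILDREN_KEY = keyOf({children: null});
//   // -> 'children' in unminified code, or whatever alias the minifier chose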
module.exports = keyOf;
},{}],163:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule mapObject
*/
'use strict';
var hasOwnProperty = Object.prototype.hasOwnProperty;
/**
* Executes the provided `callback` once for each enumerable own property in the
* object and constructs a new object from the results. The `callback` is
* invoked with three arguments:
*
* - the property value
* - the property name
* - the object being traversed
*
* Properties that are added after the call to `mapObject` will not be visited
* by `callback`. If the values of existing properties are changed, the value
* passed to `callback` will be the value at the time `mapObject` visits them.
* Properties that are deleted before being visited are not visited.
*
* @grep function objectMap()
* @grep function objMap()
*
* @param {?object} object
* @param {function} callback
* @param {*} context
* @return {?object}
*/
function mapObject(object, callback, context) {
if (!object) {
return null;
}
var result = {};
for (var name in object) {
if (hasOwnProperty.call(object, name)) {
result[name] = callback.call(context, object[name], name, object);
}
}
return result;
}
module.exports = mapObject;
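/**
 * Illustrative usage sketch (not exercised by the bundle itself):
 *
 *   mapObject({a: 1, b: 2}, function(value, name) {
 *     return name + '=' + value * 2;
 *   });
 *   // => {a: 'a=2', b: 'b=4'}; a null/undefined object returns null
 */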
},{}],164:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule memoizeStringOnly
* @typechecks static-only
*/
'use strict';
/**
* Memoizes the return value of a function that accepts one string argument.
*
* @param {function} callback
* @return {function}
*/
function memoizeStringOnly(callback) {
var cache = {};
return function(string) {
if (!cache.hasOwnProperty(string)) {
cache[string] = callback.call(this, string);
}
return cache[string];
};
}
module.exports = memoizeStringOnly;
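/**
 * Illustrative usage sketch (not exercised by the bundle itself):
 *
 *   var hyphenate = memoizeStringOnly(function(string) {
 *     return string.replace(/([A-Z])/g, '-$1').toLowerCase();
 *   });
 *   hyphenate('backgroundColor'); // computes 'background-color'
 *   hyphenate('backgroundColor'); // second call hits the cache
 */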
},{}],165:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule onlyChild
*/
'use strict';
var ReactElement = require("./ReactElement");
var invariant = require("./invariant");
/**
* Returns the first child in a collection of children and verifies that there
* is only one child in the collection. The current implementation of this
* function assumes that a single child gets passed without a wrapper, but the
* purpose of this helper function is to abstract away the particular structure
* of children.
*
* @param {?object} children Child collection structure.
* @return {ReactComponent} The first and only `ReactComponent` contained in the
* structure.
*/
function onlyChild(children) {
("production" !== "production" ? invariant(
ReactElement.isValidElement(children),
'onlyChild must be passed a children with exactly one child.'
) : invariant(ReactElement.isValidElement(children)));
return children;
}
module.exports = onlyChild;
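/**
 * Illustrative usage sketch (not exercised by the bundle itself):
 *
 *   // Inside a component that expects exactly one element child:
 *   var child = onlyChild(this.props.children);
 *   // Throws via invariant if `children` is an array, text, or empty.
 */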
},{"./ReactElement":67,"./invariant":155}],166:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule performance
* @typechecks
*/
"use strict";
var ExecutionEnvironment = require("./ExecutionEnvironment");
var performance;
if (ExecutionEnvironment.canUseDOM) {
performance =
window.performance ||
window.msPerformance ||
window.webkitPerformance;
}
module.exports = performance || {};
},{"./ExecutionEnvironment":26}],167:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule performanceNow
* @typechecks
*/
var performance = require("./performance");
/**
* Detect if we can use `window.performance.now()` and gracefully fallback to
* `Date.now()` if it doesn't exist. We need to support Firefox < 15 for now
* because of Facebook's testing infrastructure.
*/
if (!performance || !performance.now) {
performance = Date;
}
var performanceNow = performance.now.bind(performance);
module.exports = performanceNow;
},{"./performance":166}],168:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule quoteAttributeValueForBrowser
*/
'use strict';
var escapeTextContentForBrowser = require("./escapeTextContentForBrowser");
/**
* Escapes attribute value to prevent scripting attacks.
*
* @param {*} value Value to escape.
* @return {string} An escaped string.
*/
function quoteAttributeValueForBrowser(value) {
return '"' + escapeTextContentForBrowser(value) + '"';
}
module.exports = quoteAttributeValueForBrowser;
},{"./escapeTextContentForBrowser":136}],169:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule setInnerHTML
*/
/* globals MSApp */
'use strict';
var ExecutionEnvironment = require("./ExecutionEnvironment");
var WHITESPACE_TEST = /^[ \r\n\t\f]/;
var NONVISIBLE_TEST = /<(!--|link|noscript|meta|script|style)[ \r\n\t\f\/>]/;
/**
* Set the innerHTML property of a node, ensuring that whitespace is preserved
* even in IE8.
*
* @param {DOMElement} node
* @param {string} html
* @internal
*/
var setInnerHTML = function(node, html) {
node.innerHTML = html;
};
// Win8 apps: Allow all html to be inserted
if (typeof MSApp !== 'undefined' && MSApp.execUnsafeLocalFunction) {
setInnerHTML = function(node, html) {
MSApp.execUnsafeLocalFunction(function() {
node.innerHTML = html;
});
};
}
if (ExecutionEnvironment.canUseDOM) {
// IE8: When updating a just created node with innerHTML only leading
// whitespace is removed. When updating an existing node with innerHTML
// whitespace in root TextNodes is also collapsed.
// @see quirksmode.org/bugreports/archives/2004/11/innerhtml_and_t.html
// Feature detection; only IE8 is known to behave improperly like this.
var testElement = document.createElement('div');
testElement.innerHTML = ' ';
if (testElement.innerHTML === '') {
setInnerHTML = function(node, html) {
// Magic theory: IE8 supposedly differentiates between added and updated
// nodes when processing innerHTML, innerHTML on updated nodes suffers
// from worse whitespace behavior. Re-adding a node like this triggers
// the initial and more favorable whitespace behavior.
// TODO: What to do on a detached node?
if (node.parentNode) {
node.parentNode.replaceChild(node, node);
}
// We also implement a workaround for non-visible tags disappearing into
// thin air on IE8, this only happens if there is no visible text
// in-front of the non-visible tags. Piggyback on the whitespace fix
// and simply check if any non-visible tags appear in the source.
if (WHITESPACE_TEST.test(html) ||
html[0] === '<' && NONVISIBLE_TEST.test(html)) {
// Recover leading whitespace by temporarily prepending any character.
// \uFEFF has the potential advantage of being zero-width/invisible.
node.innerHTML = '\uFEFF' + html;
// deleteData leaves an empty `TextNode` which offsets the index of all
// children. Definitely want to avoid this.
var textNode = node.firstChild;
if (textNode.data.length === 1) {
node.removeChild(textNode);
} else {
textNode.deleteData(0, 1);
}
} else {
node.innerHTML = html;
}
};
}
}
module.exports = setInnerHTML;
},{"./ExecutionEnvironment":26}],170:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule setTextContent
*/
'use strict';
var ExecutionEnvironment = require("./ExecutionEnvironment");
var escapeTextContentForBrowser = require("./escapeTextContentForBrowser");
var setInnerHTML = require("./setInnerHTML");
/**
* Set the textContent property of a node, ensuring that whitespace is preserved
* even in IE8. innerText is a poor substitute for textContent and, among many
* issues, inserts <br> instead of the literal newline chars. innerHTML behaves
* as it should.
*
* @param {DOMElement} node
* @param {string} text
* @internal
*/
var setTextContent = function(node, text) {
node.textContent = text;
};
if (ExecutionEnvironment.canUseDOM) {
if (!('textContent' in document.documentElement)) {
setTextContent = function(node, text) {
setInnerHTML(node, escapeTextContentForBrowser(text));
};
}
}
module.exports = setTextContent;
},{"./ExecutionEnvironment":26,"./escapeTextContentForBrowser":136,"./setInnerHTML":169}],171:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule shallowEqual
*/
'use strict';
/**
* Performs equality by iterating through keys on an object and returning
* false when any key has values which are not strictly equal between
* objA and objB. Returns true when the values of all keys are strictly equal.
*
* @return {boolean}
*/
function shallowEqual(objA, objB) {
if (objA === objB) {
return true;
}
var key;
// Test for A's keys different from B.
for (key in objA) {
if (objA.hasOwnProperty(key) &&
(!objB.hasOwnProperty(key) || objA[key] !== objB[key])) {
return false;
}
}
// Test for B's keys missing from A.
for (key in objB) {
if (objB.hasOwnProperty(key) && !objA.hasOwnProperty(key)) {
return false;
}
}
return true;
}
module.exports = shallowEqual;
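/**
 * Illustrative behavior sketch (not exercised by the bundle itself):
 *
 *   shallowEqual({a: 1}, {a: 1});       // => true  (strictly equal values)
 *   shallowEqual({a: {}}, {a: {}});     // => false (distinct object values)
 *   shallowEqual({a: 1}, {a: 1, b: 2}); // => false (objB has an extra key)
 */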
},{}],172:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule shouldUpdateReactComponent
* @typechecks static-only
*/
'use strict';
var warning = require("./warning");
/**
* Given a `prevElement` and `nextElement`, determines if the existing
* instance should be updated as opposed to being destroyed or replaced by a new
* instance. Both arguments are elements. This ensures that this logic can
* operate on stateless trees without any backing instance.
*
* @param {?object} prevElement
* @param {?object} nextElement
* @return {boolean} True if the existing instance should be updated.
* @protected
*/
function shouldUpdateReactComponent(prevElement, nextElement) {
if (prevElement != null && nextElement != null) {
var prevType = typeof prevElement;
var nextType = typeof nextElement;
if (prevType === 'string' || prevType === 'number') {
return (nextType === 'string' || nextType === 'number');
} else {
if (nextType === 'object' &&
prevElement.type === nextElement.type &&
prevElement.key === nextElement.key) {
var ownersMatch = prevElement._owner === nextElement._owner;
var prevName = null;
var nextName = null;
var nextDisplayName = null;
if ("production" !== "production") {
if (!ownersMatch) {
if (prevElement._owner != null &&
prevElement._owner.getPublicInstance() != null &&
prevElement._owner.getPublicInstance().constructor != null) {
prevName =
prevElement._owner.getPublicInstance().constructor.displayName;
}
if (nextElement._owner != null &&
nextElement._owner.getPublicInstance() != null &&
nextElement._owner.getPublicInstance().constructor != null) {
nextName =
nextElement._owner.getPublicInstance().constructor.displayName;
}
if (nextElement.type != null &&
nextElement.type.displayName != null) {
nextDisplayName = nextElement.type.displayName;
}
if (nextElement.type != null && typeof nextElement.type === 'string') {
nextDisplayName = nextElement.type;
}
if (typeof nextElement.type !== 'string' ||
nextElement.type === 'input' ||
nextElement.type === 'textarea') {
if ((prevElement._owner != null &&
prevElement._owner._isOwnerNecessary === false) ||
(nextElement._owner != null &&
nextElement._owner._isOwnerNecessary === false)) {
if (prevElement._owner != null) {
prevElement._owner._isOwnerNecessary = true;
}
if (nextElement._owner != null) {
nextElement._owner._isOwnerNecessary = true;
}
("production" !== "production" ? warning(
false,
'<%s /> is being rendered by both %s and %s using the same ' +
'key (%s) in the same place. Currently, this means that ' +
'they don\'t preserve state. This behavior should be very ' +
'rare so we\'re considering deprecating it. Please contact ' +
'the React team and explain your use case so that we can ' +
'take that into consideration.',
nextDisplayName || 'Unknown Component',
prevName || '[Unknown]',
nextName || '[Unknown]',
prevElement.key
) : null);
}
}
}
}
return ownersMatch;
}
}
}
return false;
}
module.exports = shouldUpdateReactComponent;
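/**
 * Illustrative behavior sketch (not exercised by the bundle itself):
 *
 *   shouldUpdateReactComponent('abc', 42); // => true: text is reconciled
 *   // For elements, true requires matching `type`, `key`, and `_owner`;
 *   // anything else returns false and forces a full remount.
 */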
},{"./warning":176}],173:[function(require,module,exports){
/**
* Copyright 2014-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule toArray
* @typechecks
*/
var invariant = require("./invariant");
/**
* Convert array-like objects to arrays.
*
* This API assumes the caller knows the contents of the data type. For less
* well defined inputs use createArrayFromMixed.
*
* @param {object|function|filelist} obj
* @return {array}
*/
function toArray(obj) {
var length = obj.length;
  // Some browser builtin objects can report typeof 'function' (e.g. NodeList
  // in old versions of Safari).
("production" !== "production" ? invariant(
!Array.isArray(obj) &&
(typeof obj === 'object' || typeof obj === 'function'),
'toArray: Array-like object expected'
) : invariant(!Array.isArray(obj) &&
(typeof obj === 'object' || typeof obj === 'function')));
("production" !== "production" ? invariant(
typeof length === 'number',
'toArray: Object needs a length property'
) : invariant(typeof length === 'number'));
("production" !== "production" ? invariant(
length === 0 ||
(length - 1) in obj,
'toArray: Object should have keys for indices'
) : invariant(length === 0 ||
(length - 1) in obj));
// Old IE doesn't give collections access to hasOwnProperty. Assume inputs
// without method will throw during the slice call and skip straight to the
// fallback.
if (obj.hasOwnProperty) {
try {
return Array.prototype.slice.call(obj);
} catch (e) {
      // IE < 9 does not support Array#slice on collection objects
}
}
// Fall back to copying key by key. This assumes all keys have a value,
// so will not preserve sparsely populated inputs.
var ret = Array(length);
for (var ii = 0; ii < length; ii++) {
ret[ii] = obj[ii];
}
return ret;
}
module.exports = toArray;
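/**
 * Illustrative usage sketch (not exercised by the bundle itself):
 *
 *   function demo() {
 *     return toArray(arguments); // real Array copy of the arguments object
 *   }
 *   demo(1, 2, 3); // => [1, 2, 3]; also handles NodeList/FileList inputs
 */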
},{"./invariant":155}],174:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule traverseAllChildren
*/
'use strict';
var ReactElement = require("./ReactElement");
var ReactFragment = require("./ReactFragment");
var ReactInstanceHandles = require("./ReactInstanceHandles");
var getIteratorFn = require("./getIteratorFn");
var invariant = require("./invariant");
var warning = require("./warning");
var SEPARATOR = ReactInstanceHandles.SEPARATOR;
var SUBSEPARATOR = ':';
/**
* TODO: Test that a single child and an array with one item have the same key
* pattern.
*/
var userProvidedKeyEscaperLookup = {
'=': '=0',
'.': '=1',
':': '=2'
};
var userProvidedKeyEscapeRegex = /[=.:]/g;
var didWarnAboutMaps = false;
function userProvidedKeyEscaper(match) {
return userProvidedKeyEscaperLookup[match];
}
/**
* Generate a key string that identifies a component within a set.
*
* @param {*} component A component that could contain a manual key.
* @param {number} index Index that is used if a manual key is not provided.
* @return {string}
*/
function getComponentKey(component, index) {
if (component && component.key != null) {
// Explicit key
return wrapUserProvidedKey(component.key);
}
// Implicit key determined by the index in the set
return index.toString(36);
}
/**
* Escape a component key so that it is safe to use in a reactid.
*
* @param {*} key Component key to be escaped.
* @return {string} An escaped string.
*/
function escapeUserProvidedKey(text) {
return ('' + text).replace(
userProvidedKeyEscapeRegex,
userProvidedKeyEscaper
);
}
/**
* Wrap a `key` value explicitly provided by the user to distinguish it from
* implicitly-generated keys generated by a component's index in its parent.
*
* @param {string} key Value of a user-provided `key` attribute
* @return {string}
*/
function wrapUserProvidedKey(key) {
return '$' + escapeUserProvidedKey(key);
}
/**
* @param {?*} children Children tree container.
* @param {!string} nameSoFar Name of the key path so far.
* @param {!number} indexSoFar Number of children encountered until this point.
* @param {!function} callback Callback to invoke with each child found.
* @param {?*} traverseContext Used to pass information throughout the traversal
* process.
* @return {!number} The number of children in this subtree.
*/
function traverseAllChildrenImpl(
children,
nameSoFar,
indexSoFar,
callback,
traverseContext
) {
var type = typeof children;
if (type === 'undefined' || type === 'boolean') {
// All of the above are perceived as null.
children = null;
}
if (children === null ||
type === 'string' ||
type === 'number' ||
ReactElement.isValidElement(children)) {
callback(
traverseContext,
children,
// If it's the only child, treat the name as if it was wrapped in an array
// so that it's consistent if the number of children grows.
nameSoFar === '' ? SEPARATOR + getComponentKey(children, 0) : nameSoFar,
indexSoFar
);
return 1;
}
var child, nextName, nextIndex;
var subtreeCount = 0; // Count of children found in the current subtree.
if (Array.isArray(children)) {
for (var i = 0; i < children.length; i++) {
child = children[i];
nextName = (
(nameSoFar !== '' ? nameSoFar + SUBSEPARATOR : SEPARATOR) +
getComponentKey(child, i)
);
nextIndex = indexSoFar + subtreeCount;
subtreeCount += traverseAllChildrenImpl(
child,
nextName,
nextIndex,
callback,
traverseContext
);
}
} else {
var iteratorFn = getIteratorFn(children);
if (iteratorFn) {
var iterator = iteratorFn.call(children);
var step;
if (iteratorFn !== children.entries) {
var ii = 0;
while (!(step = iterator.next()).done) {
child = step.value;
nextName = (
(nameSoFar !== '' ? nameSoFar + SUBSEPARATOR : SEPARATOR) +
getComponentKey(child, ii++)
);
nextIndex = indexSoFar + subtreeCount;
subtreeCount += traverseAllChildrenImpl(
child,
nextName,
nextIndex,
callback,
traverseContext
);
}
} else {
if ("production" !== "production") {
("production" !== "production" ? warning(
didWarnAboutMaps,
'Using Maps as children is not yet fully supported. It is an ' +
'experimental feature that might be removed. Convert it to a ' +
'sequence / iterable of keyed ReactElements instead.'
) : null);
didWarnAboutMaps = true;
}
// Iterator will provide entry [k,v] tuples rather than values.
while (!(step = iterator.next()).done) {
var entry = step.value;
if (entry) {
child = entry[1];
nextName = (
(nameSoFar !== '' ? nameSoFar + SUBSEPARATOR : SEPARATOR) +
wrapUserProvidedKey(entry[0]) + SUBSEPARATOR +
getComponentKey(child, 0)
);
nextIndex = indexSoFar + subtreeCount;
subtreeCount += traverseAllChildrenImpl(
child,
nextName,
nextIndex,
callback,
traverseContext
);
}
}
}
} else if (type === 'object') {
("production" !== "production" ? invariant(
children.nodeType !== 1,
'traverseAllChildren(...): Encountered an invalid child; DOM ' +
'elements are not valid children of React components.'
) : invariant(children.nodeType !== 1));
var fragment = ReactFragment.extract(children);
for (var key in fragment) {
if (fragment.hasOwnProperty(key)) {
child = fragment[key];
nextName = (
(nameSoFar !== '' ? nameSoFar + SUBSEPARATOR : SEPARATOR) +
wrapUserProvidedKey(key) + SUBSEPARATOR +
getComponentKey(child, 0)
);
nextIndex = indexSoFar + subtreeCount;
subtreeCount += traverseAllChildrenImpl(
child,
nextName,
nextIndex,
callback,
traverseContext
);
}
}
}
}
return subtreeCount;
}
/**
* Traverses children that are typically specified as `props.children`, but
* might also be specified through attributes:
*
* - `traverseAllChildren(this.props.children, ...)`
* - `traverseAllChildren(this.props.leftPanelChildren, ...)`
*
* The `traverseContext` is an optional argument that is passed through the
* entire traversal. It can be used to store accumulations or anything else that
* the callback might find relevant.
*
* @param {?*} children Children tree object.
* @param {!function} callback To invoke upon traversing each child.
* @param {?*} traverseContext Context for traversal.
* @return {!number} The number of children in this subtree.
*/
function traverseAllChildren(children, callback, traverseContext) {
if (children == null) {
return 0;
}
return traverseAllChildrenImpl(children, '', 0, callback, traverseContext);
}
module.exports = traverseAllChildren;
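/**
 * Illustrative usage sketch (not exercised by the bundle itself):
 *
 *   var names = [];
 *   traverseAllChildren(children, function(ctx, child, name, index) {
 *     ctx.push(name); // key paths such as '.0', '.1:0', or '.$userKey'
 *   }, names);
 *   // The return value is the number of leaf children visited.
 */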
},{"./ReactElement":67,"./ReactFragment":73,"./ReactInstanceHandles":76,"./getIteratorFn":146,"./invariant":155,"./warning":176}],175:[function(require,module,exports){
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule update
*/
/* global hasOwnProperty:true */
'use strict';
var assign = require("./Object.assign");
var keyOf = require("./keyOf");
var invariant = require("./invariant");
var hasOwnProperty = {}.hasOwnProperty;
function shallowCopy(x) {
if (Array.isArray(x)) {
return x.concat();
} else if (x && typeof x === 'object') {
return assign(new x.constructor(), x);
} else {
return x;
}
}
var COMMAND_PUSH = keyOf({$push: null});
var COMMAND_UNSHIFT = keyOf({$unshift: null});
var COMMAND_SPLICE = keyOf({$splice: null});
var COMMAND_SET = keyOf({$set: null});
var COMMAND_MERGE = keyOf({$merge: null});
var COMMAND_APPLY = keyOf({$apply: null});
var ALL_COMMANDS_LIST = [
COMMAND_PUSH,
COMMAND_UNSHIFT,
COMMAND_SPLICE,
COMMAND_SET,
COMMAND_MERGE,
COMMAND_APPLY
];
var ALL_COMMANDS_SET = {};
ALL_COMMANDS_LIST.forEach(function(command) {
ALL_COMMANDS_SET[command] = true;
});
function invariantArrayCase(value, spec, command) {
("production" !== "production" ? invariant(
Array.isArray(value),
'update(): expected target of %s to be an array; got %s.',
command,
value
) : invariant(Array.isArray(value)));
var specValue = spec[command];
("production" !== "production" ? invariant(
Array.isArray(specValue),
'update(): expected spec of %s to be an array; got %s. ' +
'Did you forget to wrap your parameter in an array?',
command,
specValue
) : invariant(Array.isArray(specValue)));
}
function update(value, spec) {
("production" !== "production" ? invariant(
typeof spec === 'object',
'update(): You provided a key path to update() that did not contain one ' +
'of %s. Did you forget to include {%s: ...}?',
ALL_COMMANDS_LIST.join(', '),
COMMAND_SET
) : invariant(typeof spec === 'object'));
if (hasOwnProperty.call(spec, COMMAND_SET)) {
("production" !== "production" ? invariant(
Object.keys(spec).length === 1,
'Cannot have more than one key in an object with %s',
COMMAND_SET
) : invariant(Object.keys(spec).length === 1));
return spec[COMMAND_SET];
}
var nextValue = shallowCopy(value);
if (hasOwnProperty.call(spec, COMMAND_MERGE)) {
var mergeObj = spec[COMMAND_MERGE];
("production" !== "production" ? invariant(
mergeObj && typeof mergeObj === 'object',
'update(): %s expects a spec of type \'object\'; got %s',
COMMAND_MERGE,
mergeObj
) : invariant(mergeObj && typeof mergeObj === 'object'));
("production" !== "production" ? invariant(
nextValue && typeof nextValue === 'object',
'update(): %s expects a target of type \'object\'; got %s',
COMMAND_MERGE,
nextValue
) : invariant(nextValue && typeof nextValue === 'object'));
assign(nextValue, spec[COMMAND_MERGE]);
}
if (hasOwnProperty.call(spec, COMMAND_PUSH)) {
invariantArrayCase(value, spec, COMMAND_PUSH);
spec[COMMAND_PUSH].forEach(function(item) {
nextValue.push(item);
});
}
if (hasOwnProperty.call(spec, COMMAND_UNSHIFT)) {
invariantArrayCase(value, spec, COMMAND_UNSHIFT);
spec[COMMAND_UNSHIFT].forEach(function(item) {
nextValue.unshift(item);
});
}
if (hasOwnProperty.call(spec, COMMAND_SPLICE)) {
("production" !== "production" ? invariant(
Array.isArray(value),
'Expected %s target to be an array; got %s',
COMMAND_SPLICE,
value
) : invariant(Array.isArray(value)));
("production" !== "production" ? invariant(
Array.isArray(spec[COMMAND_SPLICE]),
'update(): expected spec of %s to be an array of arrays; got %s. ' +
'Did you forget to wrap your parameters in an array?',
COMMAND_SPLICE,
spec[COMMAND_SPLICE]
) : invariant(Array.isArray(spec[COMMAND_SPLICE])));
spec[COMMAND_SPLICE].forEach(function(args) {
("production" !== "production" ? invariant(
Array.isArray(args),
'update(): expected spec of %s to be an array of arrays; got %s. ' +
'Did you forget to wrap your parameters in an array?',
COMMAND_SPLICE,
spec[COMMAND_SPLICE]
) : invariant(Array.isArray(args)));
nextValue.splice.apply(nextValue, args);
});
}
if (hasOwnProperty.call(spec, COMMAND_APPLY)) {
("production" !== "production" ? invariant(
typeof spec[COMMAND_APPLY] === 'function',
'update(): expected spec of %s to be a function; got %s.',
COMMAND_APPLY,
spec[COMMAND_APPLY]
) : invariant(typeof spec[COMMAND_APPLY] === 'function'));
nextValue = spec[COMMAND_APPLY](nextValue);
}
for (var k in spec) {
if (!(ALL_COMMANDS_SET.hasOwnProperty(k) && ALL_COMMANDS_SET[k])) {
nextValue[k] = update(value[k], spec[k]);
}
}
return nextValue;
}
module.exports = update;
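/**
 * Illustrative usage sketch (not exercised by the bundle itself):
 *
 *   var state = {items: [1, 2], meta: {count: 2}};
 *   var next = update(state, {
 *     items: {$push: [3]},
 *     meta: {count: {$set: 3}}
 *   });
 *   // next.items => [1, 2, 3]; `state` itself is left untouched.
 */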
},{"./Object.assign":33,"./invariant":155,"./keyOf":162}],176:[function(require,module,exports){
/**
* Copyright 2014-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule warning
*/
"use strict";
var emptyFunction = require("./emptyFunction");
/**
* Similar to invariant but only logs a warning if the condition is not met.
* This can be used to log issues in development environments in critical
* paths. Removing the logging code for production environments will keep the
* same logic and follow the same code paths.
*/
var warning = emptyFunction;
if ("production" !== "production") {
warning = function(condition, format ) {for (var args=[],$__0=2,$__1=arguments.length;$__0<$__1;$__0++) args.push(arguments[$__0]);
if (format === undefined) {
throw new Error(
'`warning(condition, format, ...args)` requires a warning ' +
'message argument'
);
}
if (format.length < 10 || /^[s\W]*$/.test(format)) {
throw new Error(
'The warning format should be able to uniquely identify this ' +
'warning. Please, use a more descriptive format than: ' + format
);
}
if (format.indexOf('Failed Composite propType: ') === 0) {
return; // Ignore CompositeComponent proptype check.
}
if (!condition) {
var argIndex = 0;
var message = 'Warning: ' + format.replace(/%s/g, function() {return args[argIndex++];});
console.warn(message);
try {
// --- Welcome to debugging React ---
// This error was thrown as a convenience so that you can use this stack
// to find the callsite that caused this warning to fire.
throw new Error(message);
} catch(x) {}
}
};
}
module.exports = warning;
},{"./emptyFunction":134}],177:[function(require,module,exports){
/**
* Expose `Emitter`.
*/
module.exports = Emitter;
/**
* Initialize a new `Emitter`.
*
* @api public
*/
function Emitter(obj) {
if (obj) return mixin(obj);
}
/**
* Mixin the emitter properties.
*
* @param {Object} obj
* @return {Object}
* @api private
*/
function mixin(obj) {
for (var key in Emitter.prototype) {
obj[key] = Emitter.prototype[key];
}
return obj;
}
/**
* Listen on the given `event` with `fn`.
*
* @param {String} event
* @param {Function} fn
* @return {Emitter}
* @api public
*/
Emitter.prototype.on =
Emitter.prototype.addEventListener = function(event, fn){
this._callbacks = this._callbacks || {};
(this._callbacks[event] = this._callbacks[event] || [])
.push(fn);
return this;
};
/**
* Adds an `event` listener that will be invoked a single
* time then automatically removed.
*
* @param {String} event
* @param {Function} fn
* @return {Emitter}
* @api public
*/
Emitter.prototype.once = function(event, fn){
var self = this;
this._callbacks = this._callbacks || {};
function on() {
self.off(event, on);
fn.apply(this, arguments);
}
on.fn = fn;
this.on(event, on);
return this;
};
/**
* Remove the given callback for `event` or all
* registered callbacks.
*
* @param {String} event
* @param {Function} fn
* @return {Emitter}
* @api public
*/
Emitter.prototype.off =
Emitter.prototype.removeListener =
Emitter.prototype.removeAllListeners =
Emitter.prototype.removeEventListener = function(event, fn){
this._callbacks = this._callbacks || {};
// all
if (0 == arguments.length) {
this._callbacks = {};
return this;
}
// specific event
var callbacks = this._callbacks[event];
if (!callbacks) return this;
// remove all handlers
if (1 == arguments.length) {
delete this._callbacks[event];
return this;
}
// remove specific handler
var cb;
for (var i = 0; i < callbacks.length; i++) {
cb = callbacks[i];
if (cb === fn || cb.fn === fn) {
callbacks.splice(i, 1);
break;
}
}
return this;
};
/**
* Emit `event` with the given args.
*
* @param {String} event
* @param {Mixed} ...
* @return {Emitter}
*/
Emitter.prototype.emit = function(event){
this._callbacks = this._callbacks || {};
var args = [].slice.call(arguments, 1)
, callbacks = this._callbacks[event];
if (callbacks) {
callbacks = callbacks.slice(0);
for (var i = 0, len = callbacks.length; i < len; ++i) {
callbacks[i].apply(this, args);
}
}
return this;
};
/**
* Return array of callbacks for `event`.
*
* @param {String} event
* @return {Array}
* @api public
*/
Emitter.prototype.listeners = function(event){
this._callbacks = this._callbacks || {};
return this._callbacks[event] || [];
};
/**
* Check if this emitter has `event` handlers.
*
* @param {String} event
* @return {Boolean}
* @api public
*/
Emitter.prototype.hasListeners = function(event){
return !! this.listeners(event).length;
};
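/**
 * Illustrative usage sketch (not exercised by the bundle itself):
 *
 *   var emitter = new Emitter();           // or mixin: Emitter(someObject)
 *   function onPing(msg) { console.log('ping:', msg); }
 *   emitter.on('ping', onPing);
 *   emitter.emit('ping', 'hello');         // logs 'ping: hello'
 *   emitter.off('ping', onPing);           // removes just this handler
 */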
},{}],178:[function(require,module,exports){
/**
* Reduce `arr` with `fn`.
*
* @param {Array} arr
* @param {Function} fn
* @param {Mixed} initial
*
 * TODO: compatible error handling?
*/
module.exports = function(arr, fn, initial){
var idx = 0;
var len = arr.length;
var curr = arguments.length == 3
? initial
: arr[idx++];
while (idx < len) {
curr = fn.call(null, curr, arr[idx], ++idx, arr);
}
return curr;
};
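/**
 * Illustrative usage sketch (not exercised by the bundle itself):
 *
 *   reduce([1, 2, 3], function(sum, n) { return sum + n; }, 0); // => 6
 *
 * Note: unlike Array.prototype.reduce, the index handed to `fn` is the
 * index of the *next* element (see `++idx` above), so rely on it with care.
 */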
},{}],179:[function(require,module,exports){
var ua = typeof window !== 'undefined' ? window.navigator.userAgent : ''
, isOSX = /OS X/.test(ua)
, isOpera = /Opera/.test(ua)
, maybeFirefox = !/like Gecko/.test(ua) && !isOpera
var i, output = module.exports = {
0: isOSX ? '<menu>' : '<UNK>'
, 1: '<mouse 1>'
, 2: '<mouse 2>'
, 3: '<break>'
, 4: '<mouse 3>'
, 5: '<mouse 4>'
, 6: '<mouse 5>'
, 8: '<backspace>'
, 9: '<tab>'
, 12: '<clear>'
, 13: '<enter>'
, 16: '<shift>'
, 17: '<control>'
, 18: '<alt>'
, 19: '<pause>'
, 20: '<caps-lock>'
, 21: '<ime-hangul>'
, 23: '<ime-junja>'
, 24: '<ime-final>'
, 25: '<ime-kanji>'
, 27: '<escape>'
, 28: '<ime-convert>'
, 29: '<ime-nonconvert>'
, 30: '<ime-accept>'
, 31: '<ime-mode-change>'
, 32: '<space>'
, 33: '<page-up>'
, 34: '<page-down>'
, 35: '<end>'
, 36: '<home>'
, 37: '<left>'
, 38: '<up>'
, 39: '<right>'
, 40: '<down>'
, 41: '<select>'
, 42: '<print>'
, 43: '<execute>'
, 44: '<snapshot>'
, 45: '<insert>'
, 46: '<delete>'
, 47: '<help>'
, 91: '<meta>' // meta-left -- no one handles left and right properly, so we coerce into one.
, 92: '<meta>' // meta-right
, 93: isOSX ? '<meta>' : '<menu>' // chrome,opera,safari all report this for meta-right (osx mbp).
, 95: '<sleep>'
, 106: '<num-*>'
, 107: '<num-+>'
, 108: '<num-enter>'
, 109: '<num-->'
, 110: '<num-.>'
, 111: '<num-/>'
, 144: '<num-lock>'
, 145: '<scroll-lock>'
, 160: '<shift-left>'
, 161: '<shift-right>'
, 162: '<control-left>'
, 163: '<control-right>'
, 164: '<alt-left>'
, 165: '<alt-right>'
, 166: '<browser-back>'
, 167: '<browser-forward>'
, 168: '<browser-refresh>'
, 169: '<browser-stop>'
, 170: '<browser-search>'
, 171: '<browser-favorites>'
, 172: '<browser-home>'
// ff/osx reports '<volume-mute>' for '-'
, 173: isOSX && maybeFirefox ? '-' : '<volume-mute>'
, 174: '<volume-down>'
, 175: '<volume-up>'
, 176: '<next-track>'
, 177: '<prev-track>'
, 178: '<stop>'
, 179: '<play-pause>'
, 180: '<launch-mail>'
, 181: '<launch-media-select>'
, 182: '<launch-app 1>'
, 183: '<launch-app 2>'
, 186: ';'
, 187: '='
, 188: ','
, 189: '-'
, 190: '.'
, 191: '/'
, 192: '`'
, 219: '['
, 220: '\\'
, 221: ']'
, 222: "'"
, 223: '<meta>'
, 224: '<meta>' // firefox reports meta here.
, 226: '<alt-gr>'
, 229: '<ime-process>'
, 231: isOpera ? '`' : '<unicode>'
, 246: '<attention>'
, 247: '<crsel>'
, 248: '<exsel>'
, 249: '<erase-eof>'
, 250: '<play>'
, 251: '<zoom>'
, 252: '<no-name>'
, 253: '<pa-1>'
, 254: '<clear>'
}
for(i = 58; i < 65; ++i) {
output[i] = String.fromCharCode(i)
}
// 0-9
for(i = 48; i < 58; ++i) {
output[i] = (i - 48)+''
}
// A-Z
for(i = 65; i < 91; ++i) {
output[i] = String.fromCharCode(i)
}
// num0-9
for(i = 96; i < 106; ++i) {
output[i] = '<num-'+(i - 96)+'>'
}
// F1-F24
for(i = 112; i < 136; ++i) {
output[i] = 'F'+(i-111)
}
},{}],"bytes":[function(require,module,exports){
'use strict';
var convertFunction = require('./lib/byte-convert.js');
var parseFunction = require('./lib/byte-parse.js');
/**
 * Convert the given value in bytes into a string, or parse a string into an integer in bytes.
*
* @constructor
*/
function Bytes() {}
/**
* Convert the given value in bytes into a string.
*
* If the value is negative, it is kept as such. If it is a float, it is rounded.
*
* @param {number} value Value to convert
* @param {{
* case: ?string=,
* thousandsSeparator: ?string=
* }} [options] See byte parser options.
*
* @return {string}
*/
Bytes.prototype.convert = convertFunction;
/**
* Parse the string value into an integer in bytes. If no unit is given, it is assumed the value is in bytes.
*
* @param {number} value
*
* @returns {number|null}
*/
Bytes.prototype.parse = parseFunction;
/**
 * Convert the given value in bytes into a string, or parse a string into an integer in bytes.
*
* @param {string|number} value
* @param {{
* case: [string],
* thousandsSeparator: [string]
* }} [options] bytes options.
*
* @returns {string|number|null}
*/
function bytes(value, options) {
var bytesObj = new Bytes();
if (typeof value === 'string') {
return bytesObj.parse(value);
}
if (typeof value === 'number') {
return bytesObj.convert(value, options);
}
return null;
}
module.exports = bytes;
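/**
 * Illustrative usage sketch (not exercised by the bundle itself):
 *
 *   bytes(1024);   // number in  => formatted string out, e.g. '1kb'
 *   bytes('1kb');  // string in  => integer byte count out, 1024
 *   bytes(true);   // unsupported input type => null
 */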
},{"./lib/byte-convert.js":2,"./lib/byte-parse.js":3}],"classnames":[function(require,module,exports){
/*!
Copyright (c) 2015 Jed Watson.
Licensed under the MIT License (MIT), see
http://jedwatson.github.io/classnames
*/
function classNames () {
'use strict';
var classes = '';
for (var i = 0; i < arguments.length; i++) {
var arg = arguments[i];
if (!arg) continue;
var argType = typeof arg;
if ('string' === argType || 'number' === argType) {
classes += ' ' + arg;
} else if (Array.isArray(arg)) {
classes += ' ' + classNames.apply(null, arg);
} else if ('object' === argType) {
for (var key in arg) {
if (arg.hasOwnProperty(key) && arg[key]) {
classes += ' ' + key;
}
}
}
}
return classes.substr(1);
}
// safely export classNames for node / browserify
if (typeof module !== 'undefined' && module.exports) {
module.exports = classNames;
}
/* global define */
// safely export classNames for RequireJS
if (typeof define !== 'undefined' && define.amd) {
define('classnames', [], function() {
return classNames;
});
}
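/**
 * Illustrative usage sketch (not exercised by the bundle itself):
 *
 *   classNames('btn', {active: true, disabled: false}, ['pull-left']);
 *   // => 'btn active pull-left'
 *   classNames(null, '', 0); // => '' (falsy arguments are dropped)
 */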
},{}],"marked":[function(require,module,exports){
(function (global){
/**
* marked - a markdown parser
* Copyright (c) 2011-2014, Christopher Jeffrey. (MIT Licensed)
* https://github.com/chjj/marked
*/
;(function() {
/**
* Block-Level Grammar
*/
var block = {
newline: /^\n+/,
code: /^( {4}[^\n]+\n*)+/,
fences: noop,
hr: /^( *[-*_]){3,} *(?:\n+|$)/,
heading: /^ *(#{1,6}) *([^\n]+?) *#* *(?:\n+|$)/,
nptable: noop,
lheading: /^([^\n]+)\n *(=|-){2,} *(?:\n+|$)/,
blockquote: /^( *>[^\n]+(\n(?!def)[^\n]+)*\n*)+/,
list: /^( *)(bull) [\s\S]+?(?:hr|def|\n{2,}(?! )(?!\1bull )\n*|\s*$)/,
html: /^ *(?:comment *(?:\n|\s*$)|closed *(?:\n{2,}|\s*$)|closing *(?:\n{2,}|\s*$))/,
def: /^ *\[([^\]]+)\]: *<?([^\s>]+)>?(?: +["(]([^\n]+)[")])? *(?:\n+|$)/,
table: noop,
paragraph: /^((?:[^\n]+\n?(?!hr|heading|lheading|blockquote|tag|def))+)\n*/,
text: /^[^\n]+/
};
block.bullet = /(?:[*+-]|\d+\.)/;
block.item = /^( *)(bull) [^\n]*(?:\n(?!\1bull )[^\n]*)*/;
block.item = replace(block.item, 'gm')
(/bull/g, block.bullet)
();
block.list = replace(block.list)
(/bull/g, block.bullet)
('hr', '\\n+(?=\\1?(?:[-*_] *){3,}(?:\\n+|$))')
('def', '\\n+(?=' + block.def.source + ')')
();
block.blockquote = replace(block.blockquote)
('def', block.def)
();
block._tag = '(?!(?:'
+ 'a|em|strong|small|s|cite|q|dfn|abbr|data|time|code'
+ '|var|samp|kbd|sub|sup|i|b|u|mark|ruby|rt|rp|bdi|bdo'
+ '|span|br|wbr|ins|del|img)\\b)\\w+(?!:/|[^\\w\\s@]*@)\\b';
block.html = replace(block.html)
('comment', /<!--[\s\S]*?-->/)
('closed', /<(tag)[\s\S]+?<\/\1>/)
('closing', /<tag(?:"[^"]*"|'[^']*'|[^'">])*?>/)
(/tag/g, block._tag)
();
block.paragraph = replace(block.paragraph)
('hr', block.hr)
('heading', block.heading)
('lheading', block.lheading)
('blockquote', block.blockquote)
('tag', '<' + block._tag)
('def', block.def)
();
/**
* Normal Block Grammar
*/
block.normal = merge({}, block);
/**
* GFM Block Grammar
*/
block.gfm = merge({}, block.normal, {
fences: /^ *(`{3,}|~{3,}) *(\S+)? *\n([\s\S]+?)\s*\1 *(?:\n+|$)/,
paragraph: /^/
});
block.gfm.paragraph = replace(block.paragraph)
('(?!', '(?!'
+ block.gfm.fences.source.replace('\\1', '\\2') + '|'
+ block.list.source.replace('\\1', '\\3') + '|')
();
/**
* GFM + Tables Block Grammar
*/
block.tables = merge({}, block.gfm, {
nptable: /^ *(\S.*\|.*)\n *([-:]+ *\|[-| :]*)\n((?:.*\|.*(?:\n|$))*)\n*/,
table: /^ *\|(.+)\n *\|( *[-:]+[-| :]*)\n((?: *\|.*(?:\n|$))*)\n*/
});
/**
* Block Lexer
*/
function Lexer(options) {
this.tokens = [];
this.tokens.links = {};
this.options = options || marked.defaults;
this.rules = block.normal;
if (this.options.gfm) {
if (this.options.tables) {
this.rules = block.tables;
} else {
this.rules = block.gfm;
}
}
}
/**
* Expose Block Rules
*/
Lexer.rules = block;
/**
* Static Lex Method
*/
Lexer.lex = function(src, options) {
var lexer = new Lexer(options);
return lexer.lex(src);
};
/**
* Preprocessing
*/
Lexer.prototype.lex = function(src) {
src = src
.replace(/\r\n|\r/g, '\n')
.replace(/\t/g, ' ')
.replace(/\u00a0/g, ' ')
.replace(/\u2424/g, '\n');
return this.token(src, true);
};
/**
* Lexing
*/
Lexer.prototype.token = function(src, top, bq) {
var src = src.replace(/^ +$/gm, '')
, next
, loose
, cap
, bull
, b
, item
, space
, i
, l;
while (src) {
// newline
if (cap = this.rules.newline.exec(src)) {
src = src.substring(cap[0].length);
if (cap[0].length > 1) {
this.tokens.push({
type: 'space'
});
}
}
// code
if (cap = this.rules.code.exec(src)) {
src = src.substring(cap[0].length);
cap = cap[0].replace(/^ {4}/gm, '');
this.tokens.push({
type: 'code',
text: !this.options.pedantic
? cap.replace(/\n+$/, '')
: cap
});
continue;
}
// fences (gfm)
if (cap = this.rules.fences.exec(src)) {
src = src.substring(cap[0].length);
this.tokens.push({
type: 'code',
lang: cap[2],
text: cap[3]
});
continue;
}
// heading
if (cap = this.rules.heading.exec(src)) {
src = src.substring(cap[0].length);
this.tokens.push({
type: 'heading',
depth: cap[1].length,
text: cap[2]
});
continue;
}
// table no leading pipe (gfm)
if (top && (cap = this.rules.nptable.exec(src))) {
src = src.substring(cap[0].length);
item = {
type: 'table',
header: cap[1].replace(/^ *| *\| *$/g, '').split(/ *\| */),
align: cap[2].replace(/^ *|\| *$/g, '').split(/ *\| */),
cells: cap[3].replace(/\n$/, '').split('\n')
};
for (i = 0; i < item.align.length; i++) {
if (/^ *-+: *$/.test(item.align[i])) {
item.align[i] = 'right';
} else if (/^ *:-+: *$/.test(item.align[i])) {
item.align[i] = 'center';
} else if (/^ *:-+ *$/.test(item.align[i])) {
item.align[i] = 'left';
} else {
item.align[i] = null;
}
}
for (i = 0; i < item.cells.length; i++) {
item.cells[i] = item.cells[i].split(/ *\| */);
}
this.tokens.push(item);
continue;
}
// lheading
if (cap = this.rules.lheading.exec(src)) {
src = src.substring(cap[0].length);
this.tokens.push({
type: 'heading',
depth: cap[2] === '=' ? 1 : 2,
text: cap[1]
});
continue;
}
// hr
if (cap = this.rules.hr.exec(src)) {
src = src.substring(cap[0].length);
this.tokens.push({
type: 'hr'
});
continue;
}
// blockquote
if (cap = this.rules.blockquote.exec(src)) {
src = src.substring(cap[0].length);
this.tokens.push({
type: 'blockquote_start'
});
cap = cap[0].replace(/^ *> ?/gm, '');
// Pass `top` to keep the current
// "toplevel" state. This is exactly
// how markdown.pl works.
this.token(cap, top, true);
this.tokens.push({
type: 'blockquote_end'
});
continue;
}
// list
if (cap = this.rules.list.exec(src)) {
src = src.substring(cap[0].length);
bull = cap[2];
this.tokens.push({
type: 'list_start',
ordered: bull.length > 1
});
// Get each top-level item.
cap = cap[0].match(this.rules.item);
next = false;
l = cap.length;
i = 0;
for (; i < l; i++) {
item = cap[i];
// Remove the list item's bullet
// so it is seen as the next token.
space = item.length;
item = item.replace(/^ *([*+-]|\d+\.) +/, '');
// Outdent whatever the
// list item contains. Hacky.
if (~item.indexOf('\n ')) {
space -= item.length;
item = !this.options.pedantic
? item.replace(new RegExp('^ {1,' + space + '}', 'gm'), '')
: item.replace(/^ {1,4}/gm, '');
}
// Determine whether the next list item belongs here.
// Backpedal if it does not belong in this list.
if (this.options.smartLists && i !== l - 1) {
b = block.bullet.exec(cap[i + 1])[0];
if (bull !== b && !(bull.length > 1 && b.length > 1)) {
src = cap.slice(i + 1).join('\n') + src;
i = l - 1;
}
}
// Determine whether item is loose or not.
// Use: /(^|\n)(?! )[^\n]+\n\n(?!\s*$)/
// for discount behavior.
loose = next || /\n\n(?!\s*$)/.test(item);
if (i !== l - 1) {
next = item.charAt(item.length - 1) === '\n';
if (!loose) loose = next;
}
this.tokens.push({
type: loose
? 'loose_item_start'
: 'list_item_start'
});
// Recurse.
this.token(item, false, bq);
this.tokens.push({
type: 'list_item_end'
});
}
this.tokens.push({
type: 'list_end'
});
continue;
}
// html
if (cap = this.rules.html.exec(src)) {
src = src.substring(cap[0].length);
this.tokens.push({
type: this.options.sanitize
? 'paragraph'
: 'html',
pre: cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style',
text: cap[0]
});
continue;
}
// def
if ((!bq && top) && (cap = this.rules.def.exec(src))) {
src = src.substring(cap[0].length);
this.tokens.links[cap[1].toLowerCase()] = {
href: cap[2],
title: cap[3]
};
continue;
}
// table (gfm)
if (top && (cap = this.rules.table.exec(src))) {
src = src.substring(cap[0].length);
item = {
type: 'table',
header: cap[1].replace(/^ *| *\| *$/g, '').split(/ *\| */),
align: cap[2].replace(/^ *|\| *$/g, '').split(/ *\| */),
cells: cap[3].replace(/(?: *\| *)?\n$/, '').split('\n')
};
for (i = 0; i < item.align.length; i++) {
if (/^ *-+: *$/.test(item.align[i])) {
item.align[i] = 'right';
} else if (/^ *:-+: *$/.test(item.align[i])) {
item.align[i] = 'center';
} else if (/^ *:-+ *$/.test(item.align[i])) {
item.align[i] = 'left';
} else {
item.align[i] = null;
}
}
for (i = 0; i < item.cells.length; i++) {
item.cells[i] = item.cells[i]
.replace(/^ *\| *| *\| *$/g, '')
.split(/ *\| */);
}
this.tokens.push(item);
continue;
}
// top-level paragraph
if (top && (cap = this.rules.paragraph.exec(src))) {
src = src.substring(cap[0].length);
this.tokens.push({
type: 'paragraph',
text: cap[1].charAt(cap[1].length - 1) === '\n'
? cap[1].slice(0, -1)
: cap[1]
});
continue;
}
// text
if (cap = this.rules.text.exec(src)) {
// Top-level should never reach here.
src = src.substring(cap[0].length);
this.tokens.push({
type: 'text',
text: cap[0]
});
continue;
}
if (src) {
throw new
Error('Infinite loop on byte: ' + src.charCodeAt(0));
}
}
return this.tokens;
};
/**
* Inline-Level Grammar
*/
var inline = {
escape: /^\\([\\`*{}\[\]()#+\-.!_>])/,
autolink: /^<([^ >]+(@|:\/)[^ >]+)>/,
url: noop,
tag: /^<!--[\s\S]*?-->|^<\/?\w+(?:"[^"]*"|'[^']*'|[^'">])*?>/,
link: /^!?\[(inside)\]\(href\)/,
reflink: /^!?\[(inside)\]\s*\[([^\]]*)\]/,
nolink: /^!?\[((?:\[[^\]]*\]|[^\[\]])*)\]/,
strong: /^__([\s\S]+?)__(?!_)|^\*\*([\s\S]+?)\*\*(?!\*)/,
em: /^\b_((?:__|[\s\S])+?)_\b|^\*((?:\*\*|[\s\S])+?)\*(?!\*)/,
code: /^(`+)\s*([\s\S]*?[^`])\s*\1(?!`)/,
br: /^ {2,}\n(?!\s*$)/,
del: noop,
text: /^[\s\S]+?(?=[\\<!\[_*`]| {2,}\n|$)/
};
inline._inside = /(?:\[[^\]]*\]|[^\[\]]|\](?=[^\[]*\]))*/;
inline._href = /\s*<?([\s\S]*?)>?(?:\s+['"]([\s\S]*?)['"])?\s*/;
inline.link = replace(inline.link)
('inside', inline._inside)
('href', inline._href)
();
inline.reflink = replace(inline.reflink)
('inside', inline._inside)
();
/**
* Normal Inline Grammar
*/
inline.normal = merge({}, inline);
/**
* Pedantic Inline Grammar
*/
inline.pedantic = merge({}, inline.normal, {
strong: /^__(?=\S)([\s\S]*?\S)__(?!_)|^\*\*(?=\S)([\s\S]*?\S)\*\*(?!\*)/,
em: /^_(?=\S)([\s\S]*?\S)_(?!_)|^\*(?=\S)([\s\S]*?\S)\*(?!\*)/
});
/**
* GFM Inline Grammar
*/
inline.gfm = merge({}, inline.normal, {
escape: replace(inline.escape)('])', '~|])')(),
url: /^(https?:\/\/[^\s<]+[^<.,:;"')\]\s])/,
del: /^~~(?=\S)([\s\S]*?\S)~~/,
text: replace(inline.text)
(']|', '~]|')
('|', '|https?://|')
()
});
/**
* GFM + Line Breaks Inline Grammar
*/
inline.breaks = merge({}, inline.gfm, {
br: replace(inline.br)('{2,}', '*')(),
text: replace(inline.gfm.text)('{2,}', '*')()
});
/**
* Inline Lexer & Compiler
*/
function InlineLexer(links, options) {
this.options = options || marked.defaults;
this.links = links;
this.rules = inline.normal;
this.renderer = this.options.renderer || new Renderer;
this.renderer.options = this.options;
if (!this.links) {
throw new
Error('Tokens array requires a `links` property.');
}
if (this.options.gfm) {
if (this.options.breaks) {
this.rules = inline.breaks;
} else {
this.rules = inline.gfm;
}
} else if (this.options.pedantic) {
this.rules = inline.pedantic;
}
}
/**
* Expose Inline Rules
*/
InlineLexer.rules = inline;
/**
* Static Lexing/Compiling Method
*/
InlineLexer.output = function(src, links, options) {
var inline = new InlineLexer(links, options);
return inline.output(src);
};
/**
* Lexing/Compiling
*/
InlineLexer.prototype.output = function(src) {
var out = ''
, link
, text
, href
, cap;
while (src) {
// escape
if (cap = this.rules.escape.exec(src)) {
src = src.substring(cap[0].length);
out += cap[1];
continue;
}
// autolink
if (cap = this.rules.autolink.exec(src)) {
src = src.substring(cap[0].length);
if (cap[2] === '@') {
text = cap[1].charAt(6) === ':'
? this.mangle(cap[1].substring(7))
: this.mangle(cap[1]);
href = this.mangle('mailto:') + text;
} else {
text = escape(cap[1]);
href = text;
}
out += this.renderer.link(href, null, text);
continue;
}
// url (gfm)
if (!this.inLink && (cap = this.rules.url.exec(src))) {
src = src.substring(cap[0].length);
text = escape(cap[1]);
href = text;
out += this.renderer.link(href, null, text);
continue;
}
// tag
if (cap = this.rules.tag.exec(src)) {
if (!this.inLink && /^<a /i.test(cap[0])) {
this.inLink = true;
} else if (this.inLink && /^<\/a>/i.test(cap[0])) {
this.inLink = false;
}
src = src.substring(cap[0].length);
out += this.options.sanitize
? escape(cap[0])
: cap[0];
continue;
}
// link
if (cap = this.rules.link.exec(src)) {
src = src.substring(cap[0].length);
this.inLink = true;
out += this.outputLink(cap, {
href: cap[2],
title: cap[3]
});
this.inLink = false;
continue;
}
// reflink, nolink
if ((cap = this.rules.reflink.exec(src))
|| (cap = this.rules.nolink.exec(src))) {
src = src.substring(cap[0].length);
link = (cap[2] || cap[1]).replace(/\s+/g, ' ');
link = this.links[link.toLowerCase()];
if (!link || !link.href) {
out += cap[0].charAt(0);
src = cap[0].substring(1) + src;
continue;
}
this.inLink = true;
out += this.outputLink(cap, link);
this.inLink = false;
continue;
}
// strong
if (cap = this.rules.strong.exec(src)) {
src = src.substring(cap[0].length);
out += this.renderer.strong(this.output(cap[2] || cap[1]));
continue;
}
// em
if (cap = this.rules.em.exec(src)) {
src = src.substring(cap[0].length);
out += this.renderer.em(this.output(cap[2] || cap[1]));
continue;
}
// code
if (cap = this.rules.code.exec(src)) {
src = src.substring(cap[0].length);
out += this.renderer.codespan(escape(cap[2], true));
continue;
}
// br
if (cap = this.rules.br.exec(src)) {
src = src.substring(cap[0].length);
out += this.renderer.br();
continue;
}
// del (gfm)
if (cap = this.rules.del.exec(src)) {
src = src.substring(cap[0].length);
out += this.renderer.del(this.output(cap[1]));
continue;
}
// text
if (cap = this.rules.text.exec(src)) {
src = src.substring(cap[0].length);
out += escape(this.smartypants(cap[0]));
continue;
}
if (src) {
throw new
Error('Infinite loop on byte: ' + src.charCodeAt(0));
}
}
return out;
};
/**
* Compile Link
*/
InlineLexer.prototype.outputLink = function(cap, link) {
var href = escape(link.href)
, title = link.title ? escape(link.title) : null;
return cap[0].charAt(0) !== '!'
? this.renderer.link(href, title, this.output(cap[1]))
: this.renderer.image(href, title, escape(cap[1]));
};
/**
* Smartypants Transformations
*/
InlineLexer.prototype.smartypants = function(text) {
if (!this.options.smartypants) return text;
return text
// em-dashes
.replace(/--/g, '\u2014')
// opening singles
.replace(/(^|[-\u2014/(\[{"\s])'/g, '$1\u2018')
// closing singles & apostrophes
.replace(/'/g, '\u2019')
// opening doubles
.replace(/(^|[-\u2014/(\[{\u2018\s])"/g, '$1\u201c')
// closing doubles
.replace(/"/g, '\u201d')
// ellipses
.replace(/\.{3}/g, '\u2026');
};
/**
* Mangle Links
*/
InlineLexer.prototype.mangle = function(text) {
var out = ''
, l = text.length
, i = 0
, ch;
for (; i < l; i++) {
ch = text.charCodeAt(i);
if (Math.random() > 0.5) {
ch = 'x' + ch.toString(16);
}
out += '&#' + ch + ';';
}
return out;
};
/**
* Renderer
*/
function Renderer(options) {
this.options = options || {};
}
Renderer.prototype.code = function(code, lang, escaped) {
if (this.options.highlight) {
var out = this.options.highlight(code, lang);
if (out != null && out !== code) {
escaped = true;
code = out;
}
}
if (!lang) {
return '<pre><code>'
+ (escaped ? code : escape(code, true))
+ '\n</code></pre>';
}
return '<pre><code class="'
+ this.options.langPrefix
+ escape(lang, true)
+ '">'
+ (escaped ? code : escape(code, true))
+ '\n</code></pre>\n';
};
Renderer.prototype.blockquote = function(quote) {
return '<blockquote>\n' + quote + '</blockquote>\n';
};
Renderer.prototype.html = function(html) {
return html;
};
Renderer.prototype.heading = function(text, level, raw) {
return '<h'
+ level
+ ' id="'
+ this.options.headerPrefix
+ raw.toLowerCase().replace(/[^\w]+/g, '-')
+ '">'
+ text
+ '</h'
+ level
+ '>\n';
};
Renderer.prototype.hr = function() {
return this.options.xhtml ? '<hr/>\n' : '<hr>\n';
};
Renderer.prototype.list = function(body, ordered) {
var type = ordered ? 'ol' : 'ul';
return '<' + type + '>\n' + body + '</' + type + '>\n';
};
Renderer.prototype.listitem = function(text) {
return '<li>' + text + '</li>\n';
};
Renderer.prototype.paragraph = function(text) {
return '<p>' + text + '</p>\n';
};
Renderer.prototype.table = function(header, body) {
return '<table>\n'
+ '<thead>\n'
+ header
+ '</thead>\n'
+ '<tbody>\n'
+ body
+ '</tbody>\n'
+ '</table>\n';
};
Renderer.prototype.tablerow = function(content) {
return '<tr>\n' + content + '</tr>\n';
};
Renderer.prototype.tablecell = function(content, flags) {
var type = flags.header ? 'th' : 'td';
var tag = flags.align
? '<' + type + ' style="text-align:' + flags.align + '">'
: '<' + type + '>';
return tag + content + '</' + type + '>\n';
};
// span level renderer
Renderer.prototype.strong = function(text) {
return '<strong>' + text + '</strong>';
};
Renderer.prototype.em = function(text) {
return '<em>' + text + '</em>';
};
Renderer.prototype.codespan = function(text) {
return '<code>' + text + '</code>';
};
Renderer.prototype.br = function() {
return this.options.xhtml ? '<br/>' : '<br>';
};
Renderer.prototype.del = function(text) {
return '<del>' + text + '</del>';
};
Renderer.prototype.link = function(href, title, text) {
if (this.options.sanitize) {
try {
var prot = decodeURIComponent(unescape(href))
.replace(/[^\w:]/g, '')
.toLowerCase();
} catch (e) {
return '';
}
if (prot.indexOf('javascript:') === 0 || prot.indexOf('vbscript:') === 0) {
return '';
}
}
var out = '<a href="' + href + '"';
if (title) {
out += ' title="' + title + '"';
}
out += '>' + text + '</a>';
return out;
};
Renderer.prototype.image = function(href, title, text) {
var out = '<img src="' + href + '" alt="' + text + '"';
if (title) {
out += ' title="' + title + '"';
}
out += this.options.xhtml ? '/>' : '>';
return out;
};
/**
* Parsing & Compiling
*/
function Parser(options) {
this.tokens = [];
this.token = null;
this.options = options || marked.defaults;
this.options.renderer = this.options.renderer || new Renderer;
this.renderer = this.options.renderer;
this.renderer.options = this.options;
}
/**
* Static Parse Method
*/
Parser.parse = function(src, options, renderer) {
var parser = new Parser(options, renderer);
return parser.parse(src);
};
/**
* Parse Loop
*/
Parser.prototype.parse = function(src) {
this.inline = new InlineLexer(src.links, this.options, this.renderer);
this.tokens = src.reverse();
var out = '';
while (this.next()) {
out += this.tok();
}
return out;
};
/**
* Next Token
*/
Parser.prototype.next = function() {
return this.token = this.tokens.pop();
};
/**
* Preview Next Token
*/
Parser.prototype.peek = function() {
return this.tokens[this.tokens.length - 1] || 0;
};
/**
* Parse Text Tokens
*/
Parser.prototype.parseText = function() {
var body = this.token.text;
while (this.peek().type === 'text') {
body += '\n' + this.next().text;
}
return this.inline.output(body);
};
/**
* Parse Current Token
*/
Parser.prototype.tok = function() {
switch (this.token.type) {
case 'space': {
return '';
}
case 'hr': {
return this.renderer.hr();
}
case 'heading': {
return this.renderer.heading(
this.inline.output(this.token.text),
this.token.depth,
this.token.text);
}
case 'code': {
return this.renderer.code(this.token.text,
this.token.lang,
this.token.escaped);
}
case 'table': {
var header = ''
, body = ''
, i
, row
, cell
, flags
, j;
// header
cell = '';
      for (i = 0; i < this.token.header.length; i++) {
        flags = { header: true, align: this.token.align[i] };
        cell += this.renderer.tablecell(
          this.inline.output(this.token.header[i]),
          flags
        );
      }
header += this.renderer.tablerow(cell);
for (i = 0; i < this.token.cells.length; i++) {
row = this.token.cells[i];
cell = '';
for (j = 0; j < row.length; j++) {
cell += this.renderer.tablecell(
this.inline.output(row[j]),
{ header: false, align: this.token.align[j] }
);
}
body += this.renderer.tablerow(cell);
}
return this.renderer.table(header, body);
}
case 'blockquote_start': {
var body = '';
while (this.next().type !== 'blockquote_end') {
body += this.tok();
}
return this.renderer.blockquote(body);
}
case 'list_start': {
var body = ''
, ordered = this.token.ordered;
while (this.next().type !== 'list_end') {
body += this.tok();
}
return this.renderer.list(body, ordered);
}
case 'list_item_start': {
var body = '';
while (this.next().type !== 'list_item_end') {
body += this.token.type === 'text'
? this.parseText()
: this.tok();
}
return this.renderer.listitem(body);
}
case 'loose_item_start': {
var body = '';
while (this.next().type !== 'list_item_end') {
body += this.tok();
}
return this.renderer.listitem(body);
}
case 'html': {
var html = !this.token.pre && !this.options.pedantic
? this.inline.output(this.token.text)
: this.token.text;
return this.renderer.html(html);
}
case 'paragraph': {
return this.renderer.paragraph(this.inline.output(this.token.text));
}
case 'text': {
return this.renderer.paragraph(this.parseText());
}
}
};
/**
* Helpers
*/
function escape(html, encode) {
return html
.replace(!encode ? /&(?!#?\w+;)/g : /&/g, '&amp;')
.replace(/</g, '&lt;')
.replace(/>/g, '&gt;')
.replace(/"/g, '&quot;')
.replace(/'/g, '&#39;');
}
function unescape(html) {
return html.replace(/&([#\w]+);/g, function(_, n) {
n = n.toLowerCase();
if (n === 'colon') return ':';
if (n.charAt(0) === '#') {
return n.charAt(1) === 'x'
? String.fromCharCode(parseInt(n.substring(2), 16))
: String.fromCharCode(+n.substring(1));
}
return '';
});
}
function replace(regex, opt) {
regex = regex.source;
opt = opt || '';
return function self(name, val) {
if (!name) return new RegExp(regex, opt);
val = val.source || val;
val = val.replace(/(^|[^\[])\^/g, '$1');
regex = regex.replace(name, val);
return self;
};
}
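// Hedged usage sketch: replace() returns a builder that splices named
// sub-patterns into a regex source until it is called with no arguments,
// which is how marked composes its grammar elsewhere in this file.
//   var bull = /(?:[*+-]|\d+\.)/;
//   var item = replace(/^( *)(bull) [^\n]*/, 'gm')(/bull/g, bull)();
//   // item is now /^( *)((?:[*+-]|\d+\.)) [^\n]*/gm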
function noop() {}
noop.exec = noop;
function merge(obj) {
var i = 1
, target
, key;
for (; i < arguments.length; i++) {
target = arguments[i];
for (key in target) {
if (Object.prototype.hasOwnProperty.call(target, key)) {
obj[key] = target[key];
}
}
}
return obj;
}
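// e.g. merge({}, marked.defaults, {breaks: true}) returns a fresh options
// object; later arguments win, so user options override the defaults.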
/**
* Marked
*/
function marked(src, opt, callback) {
if (callback || typeof opt === 'function') {
if (!callback) {
callback = opt;
opt = null;
}
opt = merge({}, marked.defaults, opt || {});
var highlight = opt.highlight
, tokens
, pending
, i = 0;
try {
tokens = Lexer.lex(src, opt)
} catch (e) {
return callback(e);
}
pending = tokens.length;
var done = function(err) {
if (err) {
opt.highlight = highlight;
return callback(err);
}
var out;
try {
out = Parser.parse(tokens, opt);
} catch (e) {
err = e;
}
opt.highlight = highlight;
return err
? callback(err)
: callback(null, out);
};
if (!highlight || highlight.length < 3) {
return done();
}
delete opt.highlight;
if (!pending) return done();
for (; i < tokens.length; i++) {
(function(token) {
if (token.type !== 'code') {
return --pending || done();
}
return highlight(token.text, token.lang, function(err, code) {
if (err) return done(err);
if (code == null || code === token.text) {
return --pending || done();
}
token.text = code;
token.escaped = true;
--pending || done();
});
})(tokens[i]);
}
return;
}
try {
if (opt) opt = merge({}, marked.defaults, opt);
return Parser.parse(Lexer.lex(src, opt), opt);
} catch (e) {
e.message += '\nPlease report this to https://github.com/chjj/marked.';
if ((opt || marked.defaults).silent) {
return '<p>An error occurred:</p><pre>'
+ escape(e.message + '', true)
+ '</pre>';
}
throw e;
}
}
/**
* Options
*/
marked.options =
marked.setOptions = function(opt) {
merge(marked.defaults, opt);
return marked;
};
marked.defaults = {
gfm: true,
tables: true,
breaks: false,
pedantic: false,
sanitize: false,
smartLists: false,
silent: false,
highlight: null,
langPrefix: 'lang-',
smartypants: false,
headerPrefix: '',
renderer: new Renderer,
xhtml: false
};
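// Typical usage (an illustrative sketch, not from the original file):
//   marked.setOptions({gfm: true, breaks: false});
//   marked('I am using __markdown__.');
//   // => something like '<p>I am using <strong>markdown</strong>.</p>\n'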
/**
* Expose
*/
marked.Parser = Parser;
marked.parser = Parser.parse;
marked.Renderer = Renderer;
marked.Lexer = Lexer;
marked.lexer = Lexer.lex;
marked.InlineLexer = InlineLexer;
marked.inlineLexer = InlineLexer.output;
marked.parse = marked;
if (typeof module !== 'undefined' && typeof exports === 'object') {
module.exports = marked;
} else if (typeof define === 'function' && define.amd) {
define(function() { return marked; });
} else {
this.marked = marked;
}
}).call(function() {
return this || (typeof window !== 'undefined' ? window : global);
}());
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{}],"moment":[function(require,module,exports){
//! moment.js
//! version : 2.10.3
//! authors : Tim Wood, Iskren Chernev, Moment.js contributors
//! license : MIT
//! momentjs.com
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
global.moment = factory()
}(this, function () { 'use strict';
var hookCallback;
function utils_hooks__hooks () {
return hookCallback.apply(null, arguments);
}
// This is done to register the method called with moment()
// without creating circular dependencies.
function setHookCallback (callback) {
hookCallback = callback;
}
function isArray(input) {
return Object.prototype.toString.call(input) === '[object Array]';
}
function isDate(input) {
return input instanceof Date || Object.prototype.toString.call(input) === '[object Date]';
}
function map(arr, fn) {
var res = [], i;
for (i = 0; i < arr.length; ++i) {
res.push(fn(arr[i], i));
}
return res;
}
function hasOwnProp(a, b) {
return Object.prototype.hasOwnProperty.call(a, b);
}
function extend(a, b) {
for (var i in b) {
if (hasOwnProp(b, i)) {
a[i] = b[i];
}
}
if (hasOwnProp(b, 'toString')) {
a.toString = b.toString;
}
if (hasOwnProp(b, 'valueOf')) {
a.valueOf = b.valueOf;
}
return a;
}
function create_utc__createUTC (input, format, locale, strict) {
return createLocalOrUTC(input, format, locale, strict, true).utc();
}
function defaultParsingFlags() {
// We need to deep clone this object.
return {
empty : false,
unusedTokens : [],
unusedInput : [],
overflow : -2,
charsLeftOver : 0,
nullInput : false,
invalidMonth : null,
invalidFormat : false,
userInvalidated : false,
iso : false
};
}
function getParsingFlags(m) {
if (m._pf == null) {
m._pf = defaultParsingFlags();
}
return m._pf;
}
function valid__isValid(m) {
if (m._isValid == null) {
var flags = getParsingFlags(m);
m._isValid = !isNaN(m._d.getTime()) &&
flags.overflow < 0 &&
!flags.empty &&
!flags.invalidMonth &&
!flags.nullInput &&
!flags.invalidFormat &&
!flags.userInvalidated;
if (m._strict) {
m._isValid = m._isValid &&
flags.charsLeftOver === 0 &&
flags.unusedTokens.length === 0 &&
flags.bigHour === undefined;
}
}
return m._isValid;
}
function valid__createInvalid (flags) {
var m = create_utc__createUTC(NaN);
if (flags != null) {
extend(getParsingFlags(m), flags);
}
else {
getParsingFlags(m).userInvalidated = true;
}
return m;
}
var momentProperties = utils_hooks__hooks.momentProperties = [];
function copyConfig(to, from) {
var i, prop, val;
if (typeof from._isAMomentObject !== 'undefined') {
to._isAMomentObject = from._isAMomentObject;
}
if (typeof from._i !== 'undefined') {
to._i = from._i;
}
if (typeof from._f !== 'undefined') {
to._f = from._f;
}
if (typeof from._l !== 'undefined') {
to._l = from._l;
}
if (typeof from._strict !== 'undefined') {
to._strict = from._strict;
}
if (typeof from._tzm !== 'undefined') {
to._tzm = from._tzm;
}
if (typeof from._isUTC !== 'undefined') {
to._isUTC = from._isUTC;
}
if (typeof from._offset !== 'undefined') {
to._offset = from._offset;
}
if (typeof from._pf !== 'undefined') {
to._pf = getParsingFlags(from);
}
if (typeof from._locale !== 'undefined') {
to._locale = from._locale;
}
if (momentProperties.length > 0) {
for (i in momentProperties) {
prop = momentProperties[i];
val = from[prop];
if (typeof val !== 'undefined') {
to[prop] = val;
}
}
}
return to;
}
var updateInProgress = false;
// Moment prototype object
function Moment(config) {
copyConfig(this, config);
this._d = new Date(+config._d);
// Prevent infinite loop in case updateOffset creates new moment
// objects.
if (updateInProgress === false) {
updateInProgress = true;
utils_hooks__hooks.updateOffset(this);
updateInProgress = false;
}
}
function isMoment (obj) {
return obj instanceof Moment || (obj != null && obj._isAMomentObject != null);
}
function toInt(argumentForCoercion) {
var coercedNumber = +argumentForCoercion,
value = 0;
if (coercedNumber !== 0 && isFinite(coercedNumber)) {
if (coercedNumber >= 0) {
value = Math.floor(coercedNumber);
} else {
value = Math.ceil(coercedNumber);
}
}
return value;
}
function compareArrays(array1, array2, dontConvert) {
var len = Math.min(array1.length, array2.length),
lengthDiff = Math.abs(array1.length - array2.length),
diffs = 0,
i;
for (i = 0; i < len; i++) {
if ((dontConvert && array1[i] !== array2[i]) ||
(!dontConvert && toInt(array1[i]) !== toInt(array2[i]))) {
diffs++;
}
}
return diffs + lengthDiff;
}
function Locale() {
}
var locales = {};
var globalLocale;
function normalizeLocale(key) {
return key ? key.toLowerCase().replace('_', '-') : key;
}
// pick the locale from the array
// try ['en-au', 'en-gb'] as 'en-au', 'en-gb', 'en'; i.e. move through the list trying each
// substring from most specific to least, but move on to the next array item if it is a more specific variant than the current root
function chooseLocale(names) {
var i = 0, j, next, locale, split;
while (i < names.length) {
split = normalizeLocale(names[i]).split('-');
j = split.length;
next = normalizeLocale(names[i + 1]);
next = next ? next.split('-') : null;
while (j > 0) {
locale = loadLocale(split.slice(0, j).join('-'));
if (locale) {
return locale;
}
if (next && next.length >= j && compareArrays(split, next, true) >= j - 1) {
//the next array item is better than a shallower substring of this one
break;
}
j--;
}
i++;
}
return null;
}
function loadLocale(name) {
var oldLocale = null;
// TODO: Find a better way to register and load all the locales in Node
if (!locales[name] && typeof module !== 'undefined' &&
module && module.exports) {
try {
oldLocale = globalLocale._abbr;
require('./locale/' + name);
// because defineLocale currently also sets the global locale, we
// want to undo that for lazy loaded locales
locale_locales__getSetGlobalLocale(oldLocale);
} catch (e) { }
}
return locales[name];
}
// This function will load locale and then set the global locale. If
// no arguments are passed in, it will simply return the current global
// locale key.
function locale_locales__getSetGlobalLocale (key, values) {
var data;
if (key) {
if (typeof values === 'undefined') {
data = locale_locales__getLocale(key);
}
else {
data = defineLocale(key, values);
}
if (data) {
// moment.duration._locale = moment._locale = data;
globalLocale = data;
}
}
return globalLocale._abbr;
}
function defineLocale (name, values) {
if (values !== null) {
values.abbr = name;
if (!locales[name]) {
locales[name] = new Locale();
}
locales[name].set(values);
// backwards compat for now: also set the locale
locale_locales__getSetGlobalLocale(name);
return locales[name];
} else {
// useful for testing
delete locales[name];
return null;
}
}
// returns locale data
function locale_locales__getLocale (key) {
var locale;
if (key && key._locale && key._locale._abbr) {
key = key._locale._abbr;
}
if (!key) {
return globalLocale;
}
if (!isArray(key)) {
//short-circuit everything else
locale = loadLocale(key);
if (locale) {
return locale;
}
key = [key];
}
return chooseLocale(key);
}
var aliases = {};
function addUnitAlias (unit, shorthand) {
var lowerCase = unit.toLowerCase();
aliases[lowerCase] = aliases[lowerCase + 's'] = aliases[shorthand] = unit;
}
function normalizeUnits(units) {
return typeof units === 'string' ? aliases[units] || aliases[units.toLowerCase()] : undefined;
}
function normalizeObjectUnits(inputObject) {
var normalizedInput = {},
normalizedProp,
prop;
for (prop in inputObject) {
if (hasOwnProp(inputObject, prop)) {
normalizedProp = normalizeUnits(prop);
if (normalizedProp) {
normalizedInput[normalizedProp] = inputObject[prop];
}
}
}
return normalizedInput;
}
function makeGetSet (unit, keepTime) {
return function (value) {
if (value != null) {
get_set__set(this, unit, value);
utils_hooks__hooks.updateOffset(this, keepTime);
return this;
} else {
return get_set__get(this, unit);
}
};
}
function get_set__get (mom, unit) {
return mom._d['get' + (mom._isUTC ? 'UTC' : '') + unit]();
}
function get_set__set (mom, unit, value) {
return mom._d['set' + (mom._isUTC ? 'UTC' : '') + unit](value);
}
// MOMENTS
function getSet (units, value) {
var unit;
if (typeof units === 'object') {
for (unit in units) {
this.set(unit, units[unit]);
}
} else {
units = normalizeUnits(units);
if (typeof this[units] === 'function') {
return this[units](value);
}
}
return this;
}
function zeroFill(number, targetLength, forceSign) {
var output = '' + Math.abs(number),
sign = number >= 0;
while (output.length < targetLength) {
output = '0' + output;
}
return (sign ? (forceSign ? '+' : '') : '-') + output;
}
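// e.g. zeroFill(5, 3) => '005', zeroFill(-7, 2) => '-07', and with the sign
// forced: zeroFill(5, 2, true) => '+05'.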
var formattingTokens = /(\[[^\[]*\])|(\\)?(Mo|MM?M?M?|Do|DDDo|DD?D?D?|ddd?d?|do?|w[o|w]?|W[o|W]?|Q|YYYYYY|YYYYY|YYYY|YY|gg(ggg?)?|GG(GGG?)?|e|E|a|A|hh?|HH?|mm?|ss?|S{1,4}|x|X|zz?|ZZ?|.)/g;
var localFormattingTokens = /(\[[^\[]*\])|(\\)?(LTS|LT|LL?L?L?|l{1,4})/g;
var formatFunctions = {};
var formatTokenFunctions = {};
// token: 'M'
// padded: ['MM', 2]
// ordinal: 'Mo'
// callback: function () { this.month() + 1 }
function addFormatToken (token, padded, ordinal, callback) {
var func = callback;
if (typeof callback === 'string') {
func = function () {
return this[callback]();
};
}
if (token) {
formatTokenFunctions[token] = func;
}
if (padded) {
formatTokenFunctions[padded[0]] = function () {
return zeroFill(func.apply(this, arguments), padded[1], padded[2]);
};
}
if (ordinal) {
formatTokenFunctions[ordinal] = function () {
return this.localeData().ordinal(func.apply(this, arguments), token);
};
}
}
function removeFormattingTokens(input) {
if (input.match(/\[[\s\S]/)) {
return input.replace(/^\[|\]$/g, '');
}
return input.replace(/\\/g, '');
}
function makeFormatFunction(format) {
var array = format.match(formattingTokens), i, length;
for (i = 0, length = array.length; i < length; i++) {
if (formatTokenFunctions[array[i]]) {
array[i] = formatTokenFunctions[array[i]];
} else {
array[i] = removeFormattingTokens(array[i]);
}
}
return function (mom) {
var output = '';
for (i = 0; i < length; i++) {
output += array[i] instanceof Function ? array[i].call(mom, format) : array[i];
}
return output;
};
}
// format date using native date object
function formatMoment(m, format) {
if (!m.isValid()) {
return m.localeData().invalidDate();
}
format = expandFormat(format, m.localeData());
if (!formatFunctions[format]) {
formatFunctions[format] = makeFormatFunction(format);
}
return formatFunctions[format](m);
}
function expandFormat(format, locale) {
var i = 5;
function replaceLongDateFormatTokens(input) {
return locale.longDateFormat(input) || input;
}
localFormattingTokens.lastIndex = 0;
while (i >= 0 && localFormattingTokens.test(format)) {
format = format.replace(localFormattingTokens, replaceLongDateFormatTokens);
localFormattingTokens.lastIndex = 0;
i -= 1;
}
return format;
}
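// e.g. for the default English locale, expandFormat('LT', locale) resolves
// to something like 'h:mm A'; the counter bounds expansion to a handful of
// passes so a self-referencing locale format cannot loop forever.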
var match1 = /\d/; // 0 - 9
var match2 = /\d\d/; // 00 - 99
var match3 = /\d{3}/; // 000 - 999
var match4 = /\d{4}/; // 0000 - 9999
var match6 = /[+-]?\d{6}/; // -999999 - 999999
var match1to2 = /\d\d?/; // 0 - 99
var match1to3 = /\d{1,3}/; // 0 - 999
var match1to4 = /\d{1,4}/; // 0 - 9999
var match1to6 = /[+-]?\d{1,6}/; // -999999 - 999999
var matchUnsigned = /\d+/; // 0 - inf
var matchSigned = /[+-]?\d+/; // -inf - inf
var matchOffset = /Z|[+-]\d\d:?\d\d/gi; // +00:00 -00:00 +0000 -0000 or Z
var matchTimestamp = /[+-]?\d+(\.\d{1,3})?/; // 123456789 123456789.123
// any word (or two) of letters or numbers, including the two/three-word month names used in Arabic.
var matchWord = /[0-9]*['a-z\u00A0-\u05FF\u0700-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]+|[\u0600-\u06FF\/]+(\s*?[\u0600-\u06FF]+){1,2}/i;
var regexes = {};
function addRegexToken (token, regex, strictRegex) {
regexes[token] = typeof regex === 'function' ? regex : function (isStrict) {
return (isStrict && strictRegex) ? strictRegex : regex;
};
}
function getParseRegexForToken (token, config) {
if (!hasOwnProp(regexes, token)) {
return new RegExp(unescapeFormat(token));
}
return regexes[token](config._strict, config._locale);
}
// Code from http://stackoverflow.com/questions/3561493/is-there-a-regexp-escape-function-in-javascript
function unescapeFormat(s) {
return s.replace('\\', '').replace(/\\(\[)|\\(\])|\[([^\]\[]*)\]|\\(.)/g, function (matched, p1, p2, p3, p4) {
return p1 || p2 || p3 || p4;
}).replace(/[-\/\\^$*+?.()|[\]{}]/g, '\\$&');
}
var tokens = {};
function addParseToken (token, callback) {
var i, func = callback;
if (typeof token === 'string') {
token = [token];
}
if (typeof callback === 'number') {
func = function (input, array) {
array[callback] = toInt(input);
};
}
for (i = 0; i < token.length; i++) {
tokens[token[i]] = func;
}
}
function addWeekParseToken (token, callback) {
addParseToken(token, function (input, array, config, token) {
config._w = config._w || {};
callback(input, config._w, config, token);
});
}
function addTimeToArrayFromToken(token, input, config) {
if (input != null && hasOwnProp(tokens, token)) {
tokens[token](input, config._a, config, token);
}
}
var YEAR = 0;
var MONTH = 1;
var DATE = 2;
var HOUR = 3;
var MINUTE = 4;
var SECOND = 5;
var MILLISECOND = 6;
function daysInMonth(year, month) {
return new Date(Date.UTC(year, month + 1, 0)).getUTCDate();
}
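// e.g. daysInMonth(2016, 1) => 29: months are 0-indexed, and day 0 of the
// following month is the last day of the requested one.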
// FORMATTING
addFormatToken('M', ['MM', 2], 'Mo', function () {
return this.month() + 1;
});
addFormatToken('MMM', 0, 0, function (format) {
return this.localeData().monthsShort(this, format);
});
addFormatToken('MMMM', 0, 0, function (format) {
return this.localeData().months(this, format);
});
// ALIASES
addUnitAlias('month', 'M');
// PARSING
addRegexToken('M', match1to2);
addRegexToken('MM', match1to2, match2);
addRegexToken('MMM', matchWord);
addRegexToken('MMMM', matchWord);
addParseToken(['M', 'MM'], function (input, array) {
array[MONTH] = toInt(input) - 1;
});
addParseToken(['MMM', 'MMMM'], function (input, array, config, token) {
var month = config._locale.monthsParse(input, token, config._strict);
// if we didn't find a month name, mark the date as invalid.
if (month != null) {
array[MONTH] = month;
} else {
getParsingFlags(config).invalidMonth = input;
}
});
// LOCALES
var defaultLocaleMonths = 'January_February_March_April_May_June_July_August_September_October_November_December'.split('_');
function localeMonths (m) {
return this._months[m.month()];
}
var defaultLocaleMonthsShort = 'Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec'.split('_');
function localeMonthsShort (m) {
return this._monthsShort[m.month()];
}
function localeMonthsParse (monthName, format, strict) {
var i, mom, regex;
if (!this._monthsParse) {
this._monthsParse = [];
this._longMonthsParse = [];
this._shortMonthsParse = [];
}
for (i = 0; i < 12; i++) {
// make the regex if we don't have it already
mom = create_utc__createUTC([2000, i]);
if (strict && !this._longMonthsParse[i]) {
this._longMonthsParse[i] = new RegExp('^' + this.months(mom, '').replace('.', '') + '$', 'i');
this._shortMonthsParse[i] = new RegExp('^' + this.monthsShort(mom, '').replace('.', '') + '$', 'i');
}
if (!strict && !this._monthsParse[i]) {
regex = '^' + this.months(mom, '') + '|^' + this.monthsShort(mom, '');
this._monthsParse[i] = new RegExp(regex.replace('.', ''), 'i');
}
// test the regex
if (strict && format === 'MMMM' && this._longMonthsParse[i].test(monthName)) {
return i;
} else if (strict && format === 'MMM' && this._shortMonthsParse[i].test(monthName)) {
return i;
} else if (!strict && this._monthsParse[i].test(monthName)) {
return i;
}
}
}
// MOMENTS
function setMonth (mom, value) {
var dayOfMonth;
// TODO: Move this out of here!
if (typeof value === 'string') {
value = mom.localeData().monthsParse(value);
// TODO: Another silent failure?
if (typeof value !== 'number') {
return mom;
}
}
dayOfMonth = Math.min(mom.date(), daysInMonth(mom.year(), value));
mom._d['set' + (mom._isUTC ? 'UTC' : '') + 'Month'](value, dayOfMonth);
return mom;
}
function getSetMonth (value) {
if (value != null) {
setMonth(this, value);
utils_hooks__hooks.updateOffset(this, true);
return this;
} else {
return get_set__get(this, 'Month');
}
}
function getDaysInMonth () {
return daysInMonth(this.year(), this.month());
}
function checkOverflow (m) {
var overflow;
var a = m._a;
if (a && getParsingFlags(m).overflow === -2) {
overflow =
a[MONTH] < 0 || a[MONTH] > 11 ? MONTH :
a[DATE] < 1 || a[DATE] > daysInMonth(a[YEAR], a[MONTH]) ? DATE :
a[HOUR] < 0 || a[HOUR] > 24 || (a[HOUR] === 24 && (a[MINUTE] !== 0 || a[SECOND] !== 0 || a[MILLISECOND] !== 0)) ? HOUR :
a[MINUTE] < 0 || a[MINUTE] > 59 ? MINUTE :
a[SECOND] < 0 || a[SECOND] > 59 ? SECOND :
a[MILLISECOND] < 0 || a[MILLISECOND] > 999 ? MILLISECOND :
-1;
if (getParsingFlags(m)._overflowDayOfYear && (overflow < YEAR || overflow > DATE)) {
overflow = DATE;
}
getParsingFlags(m).overflow = overflow;
}
return m;
}
function warn(msg) {
if (utils_hooks__hooks.suppressDeprecationWarnings === false && typeof console !== 'undefined' && console.warn) {
console.warn('Deprecation warning: ' + msg);
}
}
function deprecate(msg, fn) {
var firstTime = true,
msgWithStack = msg + '\n' + (new Error()).stack;
return extend(function () {
if (firstTime) {
warn(msgWithStack);
firstTime = false;
}
return fn.apply(this, arguments);
}, fn);
}
var deprecations = {};
function deprecateSimple(name, msg) {
if (!deprecations[name]) {
warn(msg);
deprecations[name] = true;
}
}
utils_hooks__hooks.suppressDeprecationWarnings = false;
var from_string__isoRegex = /^\s*(?:[+-]\d{6}|\d{4})-(?:(\d\d-\d\d)|(W\d\d$)|(W\d\d-\d)|(\d\d\d))((T| )(\d\d(:\d\d(:\d\d(\.\d+)?)?)?)?([\+\-]\d\d(?::?\d\d)?|\s*Z)?)?$/;
var isoDates = [
['YYYYYY-MM-DD', /[+-]\d{6}-\d{2}-\d{2}/],
['YYYY-MM-DD', /\d{4}-\d{2}-\d{2}/],
['GGGG-[W]WW-E', /\d{4}-W\d{2}-\d/],
['GGGG-[W]WW', /\d{4}-W\d{2}/],
['YYYY-DDD', /\d{4}-\d{3}/]
];
// iso time formats and regexes
var isoTimes = [
['HH:mm:ss.SSSS', /(T| )\d\d:\d\d:\d\d\.\d+/],
['HH:mm:ss', /(T| )\d\d:\d\d:\d\d/],
['HH:mm', /(T| )\d\d:\d\d/],
['HH', /(T| )\d\d/]
];
var aspNetJsonRegex = /^\/?Date\((\-?\d+)/i;
// date from iso format
function configFromISO(config) {
var i, l,
string = config._i,
match = from_string__isoRegex.exec(string);
if (match) {
getParsingFlags(config).iso = true;
for (i = 0, l = isoDates.length; i < l; i++) {
if (isoDates[i][1].exec(string)) {
// match[5] should be 'T' or undefined
config._f = isoDates[i][0] + (match[6] || ' ');
break;
}
}
for (i = 0, l = isoTimes.length; i < l; i++) {
if (isoTimes[i][1].exec(string)) {
config._f += isoTimes[i][0];
break;
}
}
if (string.match(matchOffset)) {
config._f += 'Z';
}
configFromStringAndFormat(config);
} else {
config._isValid = false;
}
}
// date from iso format or fallback
function configFromString(config) {
var matched = aspNetJsonRegex.exec(config._i);
if (matched !== null) {
config._d = new Date(+matched[1]);
return;
}
configFromISO(config);
if (config._isValid === false) {
delete config._isValid;
utils_hooks__hooks.createFromInputFallback(config);
}
}
utils_hooks__hooks.createFromInputFallback = deprecate(
'moment construction falls back to js Date. This is ' +
'discouraged and will be removed in upcoming major ' +
'release. Please refer to ' +
'https://github.com/moment/moment/issues/1407 for more info.',
function (config) {
config._d = new Date(config._i + (config._useUTC ? ' UTC' : ''));
}
);
function createDate (y, m, d, h, M, s, ms) {
//can't just apply() to create a date:
//http://stackoverflow.com/questions/181348/instantiating-a-javascript-object-by-calling-prototype-constructor-apply
var date = new Date(y, m, d, h, M, s, ms);
//the Date constructor remaps years 0-99 to 1900-1999; setting the year explicitly avoids that
if (y < 1970) {
date.setFullYear(y);
}
return date;
}
function createUTCDate (y) {
var date = new Date(Date.UTC.apply(null, arguments));
if (y < 1970) {
date.setUTCFullYear(y);
}
return date;
}
addFormatToken(0, ['YY', 2], 0, function () {
return this.year() % 100;
});
addFormatToken(0, ['YYYY', 4], 0, 'year');
addFormatToken(0, ['YYYYY', 5], 0, 'year');
addFormatToken(0, ['YYYYYY', 6, true], 0, 'year');
// ALIASES
addUnitAlias('year', 'y');
// PARSING
addRegexToken('Y', matchSigned);
addRegexToken('YY', match1to2, match2);
addRegexToken('YYYY', match1to4, match4);
addRegexToken('YYYYY', match1to6, match6);
addRegexToken('YYYYYY', match1to6, match6);
addParseToken(['YYYY', 'YYYYY', 'YYYYYY'], YEAR);
addParseToken('YY', function (input, array) {
array[YEAR] = utils_hooks__hooks.parseTwoDigitYear(input);
});
// HELPERS
function daysInYear(year) {
return isLeapYear(year) ? 366 : 365;
}
function isLeapYear(year) {
return (year % 4 === 0 && year % 100 !== 0) || year % 400 === 0;
}
// HOOKS
utils_hooks__hooks.parseTwoDigitYear = function (input) {
return toInt(input) + (toInt(input) > 68 ? 1900 : 2000);
};
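// e.g. 'YY' input '68' parses to 2068 while '69' parses to 1969; override
// this hook to move the pivot.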
// MOMENTS
var getSetYear = makeGetSet('FullYear', false);
function getIsLeapYear () {
return isLeapYear(this.year());
}
addFormatToken('w', ['ww', 2], 'wo', 'week');
addFormatToken('W', ['WW', 2], 'Wo', 'isoWeek');
// ALIASES
addUnitAlias('week', 'w');
addUnitAlias('isoWeek', 'W');
// PARSING
addRegexToken('w', match1to2);
addRegexToken('ww', match1to2, match2);
addRegexToken('W', match1to2);
addRegexToken('WW', match1to2, match2);
addWeekParseToken(['w', 'ww', 'W', 'WW'], function (input, week, config, token) {
week[token.substr(0, 1)] = toInt(input);
});
// HELPERS
// firstDayOfWeek 0 = sun, 6 = sat
// the day of the week that starts the week
// (usually sunday or monday)
// firstDayOfWeekOfYear 0 = sun, 6 = sat
// the first week is the week that contains the first
// of this day of the week
// (eg. ISO weeks use thursday (4))
function weekOfYear(mom, firstDayOfWeek, firstDayOfWeekOfYear) {
var end = firstDayOfWeekOfYear - firstDayOfWeek,
daysToDayOfWeek = firstDayOfWeekOfYear - mom.day(),
adjustedMoment;
if (daysToDayOfWeek > end) {
daysToDayOfWeek -= 7;
}
if (daysToDayOfWeek < end - 7) {
daysToDayOfWeek += 7;
}
adjustedMoment = local__createLocal(mom).add(daysToDayOfWeek, 'd');
return {
week: Math.ceil(adjustedMoment.dayOfYear() / 7),
year: adjustedMoment.year()
};
}
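// Illustrative: ISO weeks are weekOfYear(mom, 1, 4) (weeks start on Monday,
// and week 1 is the week containing January 4th), which is exactly how
// getSetISOWeek below calls it.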
// LOCALES
function localeWeek (mom) {
return weekOfYear(mom, this._week.dow, this._week.doy).week;
}
var defaultLocaleWeek = {
dow : 0, // Sunday is the first day of the week.
doy : 6 // The week that contains Jan 1st is the first week of the year.
};
function localeFirstDayOfWeek () {
return this._week.dow;
}
function localeFirstDayOfYear () {
return this._week.doy;
}
// MOMENTS
function getSetWeek (input) {
var week = this.localeData().week(this);
return input == null ? week : this.add((input - week) * 7, 'd');
}
function getSetISOWeek (input) {
var week = weekOfYear(this, 1, 4).week;
return input == null ? week : this.add((input - week) * 7, 'd');
}
addFormatToken('DDD', ['DDDD', 3], 'DDDo', 'dayOfYear');
// ALIASES
addUnitAlias('dayOfYear', 'DDD');
// PARSING
addRegexToken('DDD', match1to3);
addRegexToken('DDDD', match3);
addParseToken(['DDD', 'DDDD'], function (input, array, config) {
config._dayOfYear = toInt(input);
});
// HELPERS
//http://en.wikipedia.org/wiki/ISO_week_date#Calculating_a_date_given_the_year.2C_week_number_and_weekday
function dayOfYearFromWeeks(year, week, weekday, firstDayOfWeekOfYear, firstDayOfWeek) {
var d = createUTCDate(year, 0, 1).getUTCDay();
var daysToAdd;
var dayOfYear;
d = d === 0 ? 7 : d;
weekday = weekday != null ? weekday : firstDayOfWeek;
daysToAdd = firstDayOfWeek - d + (d > firstDayOfWeekOfYear ? 7 : 0) - (d < firstDayOfWeek ? 7 : 0);
dayOfYear = 7 * (week - 1) + (weekday - firstDayOfWeek) + daysToAdd + 1;
return {
year : dayOfYear > 0 ? year : year - 1,
dayOfYear : dayOfYear > 0 ? dayOfYear : daysInYear(year - 1) + dayOfYear
};
}
// MOMENTS
function getSetDayOfYear (input) {
var dayOfYear = Math.round((this.clone().startOf('day') - this.clone().startOf('year')) / 864e5) + 1;
return input == null ? dayOfYear : this.add((input - dayOfYear), 'd');
}
// Pick the first defined of two or three arguments.
function defaults(a, b, c) {
if (a != null) {
return a;
}
if (b != null) {
return b;
}
return c;
}
function currentDateArray(config) {
var now = new Date();
if (config._useUTC) {
return [now.getUTCFullYear(), now.getUTCMonth(), now.getUTCDate()];
}
return [now.getFullYear(), now.getMonth(), now.getDate()];
}
// convert an array to a date.
// the array should mirror the parameters below
// note: all values past the year are optional and will default to the lowest possible value.
// [year, month, day , hour, minute, second, millisecond]
function configFromArray (config) {
var i, date, input = [], currentDate, yearToUse;
if (config._d) {
return;
}
currentDate = currentDateArray(config);
//compute day of the year from weeks and weekdays
if (config._w && config._a[DATE] == null && config._a[MONTH] == null) {
dayOfYearFromWeekInfo(config);
}
//if the day of the year is set, figure out what it is
if (config._dayOfYear) {
yearToUse = defaults(config._a[YEAR], currentDate[YEAR]);
if (config._dayOfYear > daysInYear(yearToUse)) {
getParsingFlags(config)._overflowDayOfYear = true;
}
date = createUTCDate(yearToUse, 0, config._dayOfYear);
config._a[MONTH] = date.getUTCMonth();
config._a[DATE] = date.getUTCDate();
}
// Default to current date.
// * if no year, month, day of month are given, default to today
// * if day of month is given, default month and year
// * if month is given, default only year
// * if year is given, don't default anything
for (i = 0; i < 3 && config._a[i] == null; ++i) {
config._a[i] = input[i] = currentDate[i];
}
// Zero out whatever was not defaulted, including time
for (; i < 7; i++) {
config._a[i] = input[i] = (config._a[i] == null) ? (i === 2 ? 1 : 0) : config._a[i];
}
// Check for 24:00:00.000
if (config._a[HOUR] === 24 &&
config._a[MINUTE] === 0 &&
config._a[SECOND] === 0 &&
config._a[MILLISECOND] === 0) {
config._nextDay = true;
config._a[HOUR] = 0;
}
config._d = (config._useUTC ? createUTCDate : createDate).apply(null, input);
// Apply timezone offset from input. The actual utcOffset can be changed
// with parseZone.
if (config._tzm != null) {
config._d.setUTCMinutes(config._d.getUTCMinutes() - config._tzm);
}
if (config._nextDay) {
config._a[HOUR] = 24;
}
}
function dayOfYearFromWeekInfo(config) {
var w, weekYear, week, weekday, dow, doy, temp;
w = config._w;
if (w.GG != null || w.W != null || w.E != null) {
dow = 1;
doy = 4;
// TODO: We need to take the current isoWeekYear, but that depends on
// how we interpret now (local, utc, fixed offset). So create
// a now version of current config (take local/utc/offset flags, and
// create now).
weekYear = defaults(w.GG, config._a[YEAR], weekOfYear(local__createLocal(), 1, 4).year);
week = defaults(w.W, 1);
weekday = defaults(w.E, 1);
} else {
dow = config._locale._week.dow;
doy = config._locale._week.doy;
weekYear = defaults(w.gg, config._a[YEAR], weekOfYear(local__createLocal(), dow, doy).year);
week = defaults(w.w, 1);
if (w.d != null) {
// weekday -- low day numbers are considered next week
weekday = w.d;
if (weekday < dow) {
++week;
}
} else if (w.e != null) {
// local weekday -- counting starts from beginning of week
weekday = w.e + dow;
} else {
// default to beginning of week
weekday = dow;
}
}
temp = dayOfYearFromWeeks(weekYear, week, weekday, doy, dow);
config._a[YEAR] = temp.year;
config._dayOfYear = temp.dayOfYear;
}
utils_hooks__hooks.ISO_8601 = function () {};
// date from string and format string
function configFromStringAndFormat(config) {
// TODO: Move this to another part of the creation flow to prevent circular deps
if (config._f === utils_hooks__hooks.ISO_8601) {
configFromISO(config);
return;
}
config._a = [];
getParsingFlags(config).empty = true;
// This array is used to make a Date, either with `new Date` or `Date.UTC`
var string = '' + config._i,
i, parsedInput, tokens, token, skipped,
stringLength = string.length,
totalParsedInputLength = 0;
tokens = expandFormat(config._f, config._locale).match(formattingTokens) || [];
for (i = 0; i < tokens.length; i++) {
token = tokens[i];
parsedInput = (string.match(getParseRegexForToken(token, config)) || [])[0];
if (parsedInput) {
skipped = string.substr(0, string.indexOf(parsedInput));
if (skipped.length > 0) {
getParsingFlags(config).unusedInput.push(skipped);
}
string = string.slice(string.indexOf(parsedInput) + parsedInput.length);
totalParsedInputLength += parsedInput.length;
}
// don't parse if it's not a known token
if (formatTokenFunctions[token]) {
if (parsedInput) {
getParsingFlags(config).empty = false;
}
else {
getParsingFlags(config).unusedTokens.push(token);
}
addTimeToArrayFromToken(token, parsedInput, config);
}
else if (config._strict && !parsedInput) {
getParsingFlags(config).unusedTokens.push(token);
}
}
// add remaining unparsed input length to the string
getParsingFlags(config).charsLeftOver = stringLength - totalParsedInputLength;
if (string.length > 0) {
getParsingFlags(config).unusedInput.push(string);
}
// clear _12h flag if hour is <= 12
if (getParsingFlags(config).bigHour === true &&
config._a[HOUR] <= 12 &&
config._a[HOUR] > 0) {
getParsingFlags(config).bigHour = undefined;
}
// handle meridiem
config._a[HOUR] = meridiemFixWrap(config._locale, config._a[HOUR], config._meridiem);
configFromArray(config);
checkOverflow(config);
}
function meridiemFixWrap (locale, hour, meridiem) {
var isPm;
if (meridiem == null) {
// nothing to do
return hour;
}
if (locale.meridiemHour != null) {
return locale.meridiemHour(hour, meridiem);
} else if (locale.isPM != null) {
// Fallback
isPm = locale.isPM(meridiem);
if (isPm && hour < 12) {
hour += 12;
}
if (!isPm && hour === 12) {
hour = 0;
}
return hour;
} else {
// this is not supposed to happen
return hour;
}
}
function configFromStringAndArray(config) {
var tempConfig,
bestMoment,
scoreToBeat,
i,
currentScore;
if (config._f.length === 0) {
getParsingFlags(config).invalidFormat = true;
config._d = new Date(NaN);
return;
}
for (i = 0; i < config._f.length; i++) {
currentScore = 0;
tempConfig = copyConfig({}, config);
if (config._useUTC != null) {
tempConfig._useUTC = config._useUTC;
}
tempConfig._f = config._f[i];
configFromStringAndFormat(tempConfig);
if (!valid__isValid(tempConfig)) {
continue;
}
// if there is any input that was not parsed add a penalty for that format
currentScore += getParsingFlags(tempConfig).charsLeftOver;
//or tokens
currentScore += getParsingFlags(tempConfig).unusedTokens.length * 10;
getParsingFlags(tempConfig).score = currentScore;
if (scoreToBeat == null || currentScore < scoreToBeat) {
scoreToBeat = currentScore;
bestMoment = tempConfig;
}
}
extend(config, bestMoment || tempConfig);
}
function configFromObject(config) {
if (config._d) {
return;
}
var i = normalizeObjectUnits(config._i);
config._a = [i.year, i.month, i.day || i.date, i.hour, i.minute, i.second, i.millisecond];
configFromArray(config);
}
function createFromConfig (config) {
var input = config._i,
format = config._f,
res;
config._locale = config._locale || locale_locales__getLocale(config._l);
if (input === null || (format === undefined && input === '')) {
return valid__createInvalid({nullInput: true});
}
if (typeof input === 'string') {
config._i = input = config._locale.preparse(input);
}
if (isMoment(input)) {
return new Moment(checkOverflow(input));
} else if (isArray(format)) {
configFromStringAndArray(config);
} else if (format) {
configFromStringAndFormat(config);
} else if (isDate(input)) {
config._d = input;
} else {
configFromInput(config);
}
res = new Moment(checkOverflow(config));
if (res._nextDay) {
// Adding is smart enough around DST
res.add(1, 'd');
res._nextDay = undefined;
}
return res;
}
function configFromInput(config) {
var input = config._i;
if (input === undefined) {
config._d = new Date();
} else if (isDate(input)) {
config._d = new Date(+input);
} else if (typeof input === 'string') {
configFromString(config);
} else if (isArray(input)) {
config._a = map(input.slice(0), function (obj) {
return parseInt(obj, 10);
});
configFromArray(config);
} else if (typeof(input) === 'object') {
configFromObject(config);
} else if (typeof(input) === 'number') {
// from milliseconds
config._d = new Date(input);
} else {
utils_hooks__hooks.createFromInputFallback(config);
}
}
function createLocalOrUTC (input, format, locale, strict, isUTC) {
var c = {};
if (typeof(locale) === 'boolean') {
strict = locale;
locale = undefined;
}
// object construction must be done this way.
// https://github.com/moment/moment/issues/1423
c._isAMomentObject = true;
c._useUTC = c._isUTC = isUTC;
c._l = locale;
c._i = input;
c._f = format;
c._strict = strict;
return createFromConfig(c);
}
function local__createLocal (input, format, locale, strict) {
return createLocalOrUTC(input, format, locale, strict, false);
}
var prototypeMin = deprecate(
'moment().min is deprecated, use moment.min instead. https://github.com/moment/moment/issues/1548',
function () {
var other = local__createLocal.apply(null, arguments);
return other < this ? this : other;
}
);
var prototypeMax = deprecate(
'moment().max is deprecated, use moment.max instead. https://github.com/moment/moment/issues/1548',
function () {
var other = local__createLocal.apply(null, arguments);
return other > this ? this : other;
}
);
// Pick a moment m from moments so that m[fn](other) is true for all
// other. This relies on the function fn to be transitive.
//
// moments should either be an array of moment objects or an array, whose
// first element is an array of moment objects.
function pickBy(fn, moments) {
var res, i;
if (moments.length === 1 && isArray(moments[0])) {
moments = moments[0];
}
if (!moments.length) {
return local__createLocal();
}
res = moments[0];
for (i = 1; i < moments.length; ++i) {
if (moments[i][fn](res)) {
res = moments[i];
}
}
return res;
}
// TODO: Use [].sort instead?
function min () {
var args = [].slice.call(arguments, 0);
return pickBy('isBefore', args);
}
function max () {
var args = [].slice.call(arguments, 0);
return pickBy('isAfter', args);
}
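// Hedged usage (assuming min/max are exposed as moment.min/moment.max, as
// in released builds): both accept a list of moments or a single array,
//   moment.max(moment('2015-01-01'), moment('2015-06-01')) // => the June moment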
function Duration (duration) {
var normalizedInput = normalizeObjectUnits(duration),
years = normalizedInput.year || 0,
quarters = normalizedInput.quarter || 0,
months = normalizedInput.month || 0,
weeks = normalizedInput.week || 0,
days = normalizedInput.day || 0,
hours = normalizedInput.hour || 0,
minutes = normalizedInput.minute || 0,
seconds = normalizedInput.second || 0,
milliseconds = normalizedInput.millisecond || 0;
// representation for dateAddRemove
this._milliseconds = +milliseconds +
seconds * 1e3 + // 1000
minutes * 6e4 + // 1000 * 60
hours * 36e5; // 1000 * 60 * 60
// Because dateAddRemove treats 24 hours as different from a
// day when working around DST, we need to store them separately
this._days = +days +
weeks * 7;
// It is impossible to translate months into days without knowing
// which months you are talking about, so we have to store
// them separately.
this._months = +months +
quarters * 3 +
years * 12;
this._data = {};
this._locale = locale_locales__getLocale();
this._bubble();
}
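// e.g. new Duration({weeks: 2, hours: 36}) stores _days = 14 and
// _milliseconds = 36 * 36e5; the buckets are kept apart precisely so DST
// arithmetic can treat "1 day" and "24 hours" differently.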
function isDuration (obj) {
return obj instanceof Duration;
}
function offset (token, separator) {
addFormatToken(token, 0, 0, function () {
var offset = this.utcOffset();
var sign = '+';
if (offset < 0) {
offset = -offset;
sign = '-';
}
return sign + zeroFill(~~(offset / 60), 2) + separator + zeroFill(~~(offset) % 60, 2);
});
}
offset('Z', ':');
offset('ZZ', '');
// PARSING
addRegexToken('Z', matchOffset);
addRegexToken('ZZ', matchOffset);
addParseToken(['Z', 'ZZ'], function (input, array, config) {
config._useUTC = true;
config._tzm = offsetFromString(input);
});
// HELPERS
// timezone chunker
// '+10:00' > ['10', '00']
// '-1530' > ['-15', '30']
var chunkOffset = /([\+\-]|\d\d)/gi;
function offsetFromString(string) {
var matches = ((string || '').match(matchOffset) || []);
var chunk = matches[matches.length - 1] || [];
var parts = (chunk + '').match(chunkOffset) || ['-', 0, 0];
var minutes = +(parts[1] * 60) + toInt(parts[2]);
return parts[0] === '+' ? minutes : -minutes;
}
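// e.g. offsetFromString('+05:30') => 330 and offsetFromString('-0800')
// => -480 (minutes), while 'Z' falls through to 0.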
// Return a moment from input, that is local/utc/zone equivalent to model.
function cloneWithOffset(input, model) {
var res, diff;
if (model._isUTC) {
res = model.clone();
diff = (isMoment(input) || isDate(input) ? +input : +local__createLocal(input)) - (+res);
// Use low-level api, because this fn is low-level api.
res._d.setTime(+res._d + diff);
utils_hooks__hooks.updateOffset(res, false);
return res;
} else {
return local__createLocal(input).local();
}
}
function getDateOffset (m) {
// On Firefox 24, Date#getTimezoneOffset returns a floating point.
// https://github.com/moment/moment/pull/1871
return -Math.round(m._d.getTimezoneOffset() / 15) * 15;
}
// HOOKS
// This function will be called whenever a moment is mutated.
// It is intended to keep the offset in sync with the timezone.
utils_hooks__hooks.updateOffset = function () {};
// MOMENTS
// keepLocalTime = true means only change the timezone, without
// affecting the local hour. So 5:31:26 +0300 --[utcOffset(2, true)]-->
// 5:31:26 +0200 It is possible that 5:31:26 doesn't exist with offset
// +0200, so we adjust the time as needed, to be valid.
//
// Keeping the local time actually adds/subtracts (up to one hour)
// from the represented instant. That is why we call updateOffset
// a second time. If it asks us to change the offset again
// (the _changeInProgress === true case), we have to adjust, because
// there is no such time in the given timezone.
function getSetOffset (input, keepLocalTime) {
var offset = this._offset || 0,
localAdjust;
if (input != null) {
if (typeof input === 'string') {
input = offsetFromString(input);
}
if (Math.abs(input) < 16) {
input = input * 60;
}
if (!this._isUTC && keepLocalTime) {
localAdjust = getDateOffset(this);
}
this._offset = input;
this._isUTC = true;
if (localAdjust != null) {
this.add(localAdjust, 'm');
}
if (offset !== input) {
if (!keepLocalTime || this._changeInProgress) {
add_subtract__addSubtract(this, create__createDuration(input - offset, 'm'), 1, false);
} else if (!this._changeInProgress) {
this._changeInProgress = true;
utils_hooks__hooks.updateOffset(this, true);
this._changeInProgress = null;
}
}
return this;
} else {
return this._isUTC ? offset : getDateOffset(this);
}
}
function getSetZone (input, keepLocalTime) {
if (input != null) {
if (typeof input !== 'string') {
input = -input;
}
this.utcOffset(input, keepLocalTime);
return this;
} else {
return -this.utcOffset();
}
}
function setOffsetToUTC (keepLocalTime) {
return this.utcOffset(0, keepLocalTime);
}
function setOffsetToLocal (keepLocalTime) {
if (this._isUTC) {
this.utcOffset(0, keepLocalTime);
this._isUTC = false;
if (keepLocalTime) {
this.subtract(getDateOffset(this), 'm');
}
}
return this;
}
function setOffsetToParsedOffset () {
if (this._tzm) {
this.utcOffset(this._tzm);
} else if (typeof this._i === 'string') {
this.utcOffset(offsetFromString(this._i));
}
return this;
}
function hasAlignedHourOffset (input) {
if (!input) {
input = 0;
}
else {
input = local__createLocal(input).utcOffset();
}
return (this.utcOffset() - input) % 60 === 0;
}
function isDaylightSavingTime () {
return (
this.utcOffset() > this.clone().month(0).utcOffset() ||
this.utcOffset() > this.clone().month(5).utcOffset()
);
}
function isDaylightSavingTimeShifted () {
if (this._a) {
var other = this._isUTC ? create_utc__createUTC(this._a) : local__createLocal(this._a);
return this.isValid() && compareArrays(this._a, other.toArray()) > 0;
}
return false;
}
function isLocal () {
return !this._isUTC;
}
function isUtcOffset () {
return this._isUTC;
}
function isUtc () {
return this._isUTC && this._offset === 0;
}
var aspNetRegex = /(\-)?(?:(\d*)\.)?(\d+)\:(\d+)(?:\:(\d+)\.?(\d{3})?)?/;
// from http://docs.closure-library.googlecode.com/git/closure_goog_date_date.js.source.html
// somewhat more in line with 4.4.3.2 2004 spec, but allows decimal anywhere
var create__isoRegex = /^(-)?P(?:(?:([0-9,.]*)Y)?(?:([0-9,.]*)M)?(?:([0-9,.]*)D)?(?:T(?:([0-9,.]*)H)?(?:([0-9,.]*)M)?(?:([0-9,.]*)S)?)?|([0-9,.]*)W)$/;
function create__createDuration (input, key) {
var duration = input,
// matching against regexp is expensive, do it on demand
match = null,
sign,
ret,
diffRes;
if (isDuration(input)) {
duration = {
ms : input._milliseconds,
d : input._days,
M : input._months
};
} else if (typeof input === 'number') {
duration = {};
if (key) {
duration[key] = input;
} else {
duration.milliseconds = input;
}
} else if (!!(match = aspNetRegex.exec(input))) {
sign = (match[1] === '-') ? -1 : 1;
duration = {
y : 0,
d : toInt(match[DATE]) * sign,
h : toInt(match[HOUR]) * sign,
m : toInt(match[MINUTE]) * sign,
s : toInt(match[SECOND]) * sign,
ms : toInt(match[MILLISECOND]) * sign
};
} else if (!!(match = create__isoRegex.exec(input))) {
sign = (match[1] === '-') ? -1 : 1;
duration = {
y : parseIso(match[2], sign),
M : parseIso(match[3], sign),
d : parseIso(match[4], sign),
h : parseIso(match[5], sign),
m : parseIso(match[6], sign),
s : parseIso(match[7], sign),
w : parseIso(match[8], sign)
};
} else if (duration == null) {// checks for null or undefined
duration = {};
} else if (typeof duration === 'object' && ('from' in duration || 'to' in duration)) {
diffRes = momentsDifference(local__createLocal(duration.from), local__createLocal(duration.to));
duration = {};
duration.ms = diffRes.milliseconds;
duration.M = diffRes.months;
}
ret = new Duration(duration);
if (isDuration(input) && hasOwnProp(input, '_locale')) {
ret._locale = input._locale;
}
return ret;
}
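// Hedged example: an ISO 8601 duration string maps onto the fields above,
//   create__createDuration('P1Y2M3DT4H5M6S')
//   // => _months = 14, _days = 3, and 4h 5m 6s worth of _milliseconds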
create__createDuration.fn = Duration.prototype;
function parseIso (inp, sign) {
// We'd normally use ~~inp for this, but unfortunately it also
// converts floats to ints.
// inp may be undefined, so careful calling replace on it.
var res = inp && parseFloat(inp.replace(',', '.'));
// apply sign while we're at it
return (isNaN(res) ? 0 : res) * sign;
}
function positiveMomentsDifference(base, other) {
var res = {milliseconds: 0, months: 0};
res.months = other.month() - base.month() +
(other.year() - base.year()) * 12;
if (base.clone().add(res.months, 'M').isAfter(other)) {
--res.months;
}
res.milliseconds = +other - +(base.clone().add(res.months, 'M'));
return res;
}
function momentsDifference(base, other) {
var res;
other = cloneWithOffset(other, base);
if (base.isBefore(other)) {
res = positiveMomentsDifference(base, other);
} else {
res = positiveMomentsDifference(other, base);
res.milliseconds = -res.milliseconds;
res.months = -res.months;
}
return res;
}
function createAdder(direction, name) {
return function (val, period) {
var dur, tmp;
//invert the arguments, but complain about it
if (period !== null && !isNaN(+period)) {
deprecateSimple(name, 'moment().' + name + '(period, number) is deprecated. Please use moment().' + name + '(number, period).');
tmp = val; val = period; period = tmp;
}
val = typeof val === 'string' ? +val : val;
dur = create__createDuration(val, period);
add_subtract__addSubtract(this, dur, direction);
return this;
};
}
function add_subtract__addSubtract (mom, duration, isAdding, updateOffset) {
var milliseconds = duration._milliseconds,
days = duration._days,
months = duration._months;
updateOffset = updateOffset == null ? true : updateOffset;
if (milliseconds) {
mom._d.setTime(+mom._d + milliseconds * isAdding);
}
if (days) {
get_set__set(mom, 'Date', get_set__get(mom, 'Date') + days * isAdding);
}
if (months) {
setMonth(mom, get_set__get(mom, 'Month') + months * isAdding);
}
if (updateOffset) {
utils_hooks__hooks.updateOffset(mom, days || months);
}
}
var add_subtract__add = createAdder(1, 'add');
var add_subtract__subtract = createAdder(-1, 'subtract');
function moment_calendar__calendar (time) {
// We want to compare the start of today, vs this.
// Getting start-of-today depends on whether we're local/utc/offset or not.
var now = time || local__createLocal(),
sod = cloneWithOffset(now, this).startOf('day'),
diff = this.diff(sod, 'days', true),
format = diff < -6 ? 'sameElse' :
diff < -1 ? 'lastWeek' :
diff < 0 ? 'lastDay' :
diff < 1 ? 'sameDay' :
diff < 2 ? 'nextDay' :
diff < 7 ? 'nextWeek' : 'sameElse';
return this.format(this.localeData().calendar(format, this, local__createLocal(now)));
}
function clone () {
return new Moment(this);
}
function isAfter (input, units) {
var inputMs;
units = normalizeUnits(typeof units !== 'undefined' ? units : 'millisecond');
if (units === 'millisecond') {
input = isMoment(input) ? input : local__createLocal(input);
return +this > +input;
} else {
inputMs = isMoment(input) ? +input : +local__createLocal(input);
return inputMs < +this.clone().startOf(units);
}
}
function isBefore (input, units) {
var inputMs;
units = normalizeUnits(typeof units !== 'undefined' ? units : 'millisecond');
if (units === 'millisecond') {
input = isMoment(input) ? input : local__createLocal(input);
return +this < +input;
} else {
inputMs = isMoment(input) ? +input : +local__createLocal(input);
return +this.clone().endOf(units) < inputMs;
}
}
function isBetween (from, to, units) {
return this.isAfter(from, units) && this.isBefore(to, units);
}
function isSame (input, units) {
var inputMs;
units = normalizeUnits(units || 'millisecond');
if (units === 'millisecond') {
input = isMoment(input) ? input : local__createLocal(input);
return +this === +input;
} else {
inputMs = +local__createLocal(input);
return +(this.clone().startOf(units)) <= inputMs && inputMs <= +(this.clone().endOf(units));
}
}
function absFloor (number) {
if (number < 0) {
return Math.ceil(number);
} else {
return Math.floor(number);
}
}
function diff (input, units, asFloat) {
var that = cloneWithOffset(input, this),
zoneDelta = (that.utcOffset() - this.utcOffset()) * 6e4,
delta, output;
units = normalizeUnits(units);
if (units === 'year' || units === 'month' || units === 'quarter') {
output = monthDiff(this, that);
if (units === 'quarter') {
output = output / 3;
} else if (units === 'year') {
output = output / 12;
}
} else {
delta = this - that;
output = units === 'second' ? delta / 1e3 : // 1000
units === 'minute' ? delta / 6e4 : // 1000 * 60
units === 'hour' ? delta / 36e5 : // 1000 * 60 * 60
units === 'day' ? (delta - zoneDelta) / 864e5 : // 1000 * 60 * 60 * 24, negate dst
units === 'week' ? (delta - zoneDelta) / 6048e5 : // 1000 * 60 * 60 * 24 * 7, negate dst
delta;
}
return asFloat ? output : absFloor(output);
}
function monthDiff (a, b) {
// difference in months
var wholeMonthDiff = ((b.year() - a.year()) * 12) + (b.month() - a.month()),
// b is in (anchor - 1 month, anchor + 1 month)
anchor = a.clone().add(wholeMonthDiff, 'months'),
anchor2, adjust;
if (b - anchor < 0) {
anchor2 = a.clone().add(wholeMonthDiff - 1, 'months');
// linear across the month
adjust = (b - anchor) / (anchor - anchor2);
} else {
anchor2 = a.clone().add(wholeMonthDiff + 1, 'months');
// linear across the month
adjust = (b - anchor) / (anchor2 - anchor);
}
return -(wholeMonthDiff + adjust);
}
utils_hooks__hooks.defaultFormat = 'YYYY-MM-DDTHH:mm:ssZ';
function toString () {
return this.clone().locale('en').format('ddd MMM DD YYYY HH:mm:ss [GMT]ZZ');
}
function moment_format__toISOString () {
var m = this.clone().utc();
if (0 < m.year() && m.year() <= 9999) {
if ('function' === typeof Date.prototype.toISOString) {
// native implementation is ~50x faster, use it when we can
return this.toDate().toISOString();
} else {
return formatMoment(m, 'YYYY-MM-DD[T]HH:mm:ss.SSS[Z]');
}
} else {
return formatMoment(m, 'YYYYYY-MM-DD[T]HH:mm:ss.SSS[Z]');
}
}
function format (inputString) {
var output = formatMoment(this, inputString || utils_hooks__hooks.defaultFormat);
return this.localeData().postformat(output);
}
function from (time, withoutSuffix) {
if (!this.isValid()) {
return this.localeData().invalidDate();
}
return create__createDuration({to: this, from: time}).locale(this.locale()).humanize(!withoutSuffix);
}
function fromNow (withoutSuffix) {
return this.from(local__createLocal(), withoutSuffix);
}
function to (time, withoutSuffix) {
if (!this.isValid()) {
return this.localeData().invalidDate();
}
return create__createDuration({from: this, to: time}).locale(this.locale()).humanize(!withoutSuffix);
}
function toNow (withoutSuffix) {
return this.to(local__createLocal(), withoutSuffix);
}
function locale (key) {
var newLocaleData;
if (key === undefined) {
return this._locale._abbr;
} else {
newLocaleData = locale_locales__getLocale(key);
if (newLocaleData != null) {
this._locale = newLocaleData;
}
return this;
}
}
var lang = deprecate(
'moment().lang() is deprecated. Instead, use moment().localeData() to get the language configuration. Use moment().locale() to change languages.',
function (key) {
if (key === undefined) {
return this.localeData();
} else {
return this.locale(key);
}
}
);
function localeData () {
return this._locale;
}
function startOf (units) {
units = normalizeUnits(units);
// the following switch intentionally omits break keywords
// to utilize falling through the cases.
switch (units) {
case 'year':
this.month(0);
/* falls through */
case 'quarter':
case 'month':
this.date(1);
/* falls through */
case 'week':
case 'isoWeek':
case 'day':
this.hours(0);
/* falls through */
case 'hour':
this.minutes(0);
/* falls through */
case 'minute':
this.seconds(0);
/* falls through */
case 'second':
this.milliseconds(0);
}
// weeks are a special case
if (units === 'week') {
this.weekday(0);
}
if (units === 'isoWeek') {
this.isoWeekday(1);
}
// quarters are also special
if (units === 'quarter') {
this.month(Math.floor(this.month() / 3) * 3);
}
return this;
}
function endOf (units) {
units = normalizeUnits(units);
if (units === undefined || units === 'millisecond') {
return this;
}
return this.startOf(units).add(1, (units === 'isoWeek' ? 'week' : units)).subtract(1, 'ms');
}
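// e.g. m.startOf('day') zeroes hours/minutes/seconds/ms via the
// fall-through above, and m.endOf('day') is startOf('day') + 1 day - 1 ms.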
function to_type__valueOf () {
return +this._d - ((this._offset || 0) * 60000);
}
function unix () {
return Math.floor(+this / 1000);
}
function toDate () {
return this._offset ? new Date(+this) : this._d;
}
function toArray () {
var m = this;
return [m.year(), m.month(), m.date(), m.hour(), m.minute(), m.second(), m.millisecond()];
}
function moment_valid__isValid () {
return valid__isValid(this);
}
function parsingFlags () {
return extend({}, getParsingFlags(this));
}
function invalidAt () {
return getParsingFlags(this).overflow;
}
addFormatToken(0, ['gg', 2], 0, function () {
return this.weekYear() % 100;
});
addFormatToken(0, ['GG', 2], 0, function () {
return this.isoWeekYear() % 100;
});
function addWeekYearFormatToken (token, getter) {
addFormatToken(0, [token, token.length], 0, getter);
}
addWeekYearFormatToken('gggg', 'weekYear');
addWeekYearFormatToken('ggggg', 'weekYear');
addWeekYearFormatToken('GGGG', 'isoWeekYear');
addWeekYearFormatToken('GGGGG', 'isoWeekYear');
// ALIASES
addUnitAlias('weekYear', 'gg');
addUnitAlias('isoWeekYear', 'GG');
// PARSING
addRegexToken('G', matchSigned);
addRegexToken('g', matchSigned);
addRegexToken('GG', match1to2, match2);
addRegexToken('gg', match1to2, match2);
addRegexToken('GGGG', match1to4, match4);
addRegexToken('gggg', match1to4, match4);
addRegexToken('GGGGG', match1to6, match6);
addRegexToken('ggggg', match1to6, match6);
addWeekParseToken(['gggg', 'ggggg', 'GGGG', 'GGGGG'], function (input, week, config, token) {
week[token.substr(0, 2)] = toInt(input);
});
addWeekParseToken(['gg', 'GG'], function (input, week, config, token) {
week[token] = utils_hooks__hooks.parseTwoDigitYear(input);
});
// HELPERS
function weeksInYear(year, dow, doy) {
return weekOfYear(local__createLocal([year, 11, 31 + dow - doy]), dow, doy).week;
}
// MOMENTS
function getSetWeekYear (input) {
var year = weekOfYear(this, this.localeData()._week.dow, this.localeData()._week.doy).year;
return input == null ? year : this.add((input - year), 'y');
}
function getSetISOWeekYear (input) {
var year = weekOfYear(this, 1, 4).year;
return input == null ? year : this.add((input - year), 'y');
}
function getISOWeeksInYear () {
return weeksInYear(this.year(), 1, 4);
}
function getWeeksInYear () {
var weekInfo = this.localeData()._week;
return weeksInYear(this.year(), weekInfo.dow, weekInfo.doy);
}
addFormatToken('Q', 0, 0, 'quarter');
// ALIASES
addUnitAlias('quarter', 'Q');
// PARSING
addRegexToken('Q', match1);
addParseToken('Q', function (input, array) {
array[MONTH] = (toInt(input) - 1) * 3;
});
// MOMENTS
function getSetQuarter (input) {
return input == null ? Math.ceil((this.month() + 1) / 3) : this.month((input - 1) * 3 + this.month() % 3);
}
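// e.g. months 0-2 are quarter 1, so Math.ceil((month + 1) / 3) maps
// February (month 1) to 1; the setter keeps the position inside the
// quarter via this.month() % 3.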
addFormatToken('D', ['DD', 2], 'Do', 'date');
// ALIASES
addUnitAlias('date', 'D');
// PARSING
addRegexToken('D', match1to2);
addRegexToken('DD', match1to2, match2);
addRegexToken('Do', function (isStrict, locale) {
return isStrict ? locale._ordinalParse : locale._ordinalParseLenient;
});
addParseToken(['D', 'DD'], DATE);
addParseToken('Do', function (input, array) {
array[DATE] = toInt(input.match(match1to2)[0], 10);
});
// MOMENTS
var getSetDayOfMonth = makeGetSet('Date', true);
addFormatToken('d', 0, 'do', 'day');
addFormatToken('dd', 0, 0, function (format) {
return this.localeData().weekdaysMin(this, format);
});
addFormatToken('ddd', 0, 0, function (format) {
return this.localeData().weekdaysShort(this, format);
});
addFormatToken('dddd', 0, 0, function (format) {
return this.localeData().weekdays(this, format);
});
addFormatToken('e', 0, 0, 'weekday');
addFormatToken('E', 0, 0, 'isoWeekday');
// ALIASES
addUnitAlias('day', 'd');
addUnitAlias('weekday', 'e');
addUnitAlias('isoWeekday', 'E');
// PARSING
addRegexToken('d', match1to2);
addRegexToken('e', match1to2);
addRegexToken('E', match1to2);
addRegexToken('dd', matchWord);
addRegexToken('ddd', matchWord);
addRegexToken('dddd', matchWord);
addWeekParseToken(['dd', 'ddd', 'dddd'], function (input, week, config) {
var weekday = config._locale.weekdaysParse(input);
// if we didn't get a weekday name, mark the date as invalid
if (weekday != null) {
week.d = weekday;
} else {
getParsingFlags(config).invalidWeekday = input;
}
});
addWeekParseToken(['d', 'e', 'E'], function (input, week, config, token) {
week[token] = toInt(input);
});
// HELPERS
function parseWeekday(input, locale) {
if (typeof input === 'string') {
if (!isNaN(input)) {
input = parseInt(input, 10);
}
else {
input = locale.weekdaysParse(input);
if (typeof input !== 'number') {
return null;
}
}
}
return input;
}
// LOCALES
var defaultLocaleWeekdays = 'Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday'.split('_');
function localeWeekdays (m) {
return this._weekdays[m.day()];
}
var defaultLocaleWeekdaysShort = 'Sun_Mon_Tue_Wed_Thu_Fri_Sat'.split('_');
function localeWeekdaysShort (m) {
return this._weekdaysShort[m.day()];
}
var defaultLocaleWeekdaysMin = 'Su_Mo_Tu_We_Th_Fr_Sa'.split('_');
function localeWeekdaysMin (m) {
return this._weekdaysMin[m.day()];
}
function localeWeekdaysParse (weekdayName) {
var i, mom, regex;
if (!this._weekdaysParse) {
this._weekdaysParse = [];
}
for (i = 0; i < 7; i++) {
// make the regex if we don't have it already
if (!this._weekdaysParse[i]) {
mom = local__createLocal([2000, 1]).day(i);
regex = '^' + this.weekdays(mom, '') + '|^' + this.weekdaysShort(mom, '') + '|^' + this.weekdaysMin(mom, '');
this._weekdaysParse[i] = new RegExp(regex.replace('.', ''), 'i');
}
// test the regex
if (this._weekdaysParse[i].test(weekdayName)) {
return i;
}
}
}
// MOMENTS
function getSetDayOfWeek (input) {
var day = this._isUTC ? this._d.getUTCDay() : this._d.getDay();
if (input != null) {
input = parseWeekday(input, this.localeData());
return this.add(input - day, 'd');
} else {
return day;
}
}
function getSetLocaleDayOfWeek (input) {
var weekday = (this.day() + 7 - this.localeData()._week.dow) % 7;
return input == null ? weekday : this.add(input - weekday, 'd');
}
function getSetISODayOfWeek (input) {
// behaves the same as moment#day except
// as a getter, returns 7 instead of 0 (1-7 range instead of 0-6)
// as a setter, sunday should belong to the previous week.
return input == null ? this.day() || 7 : this.day(this.day() % 7 ? input : input - 7);
}
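// Illustrative example (assumes the public API): for a moment that falls
// on a Sunday,
//   m.day();        // 0  (locale-agnostic 0-6 range)
//   m.isoWeekday(); // 7  (ISO 1-7 range, Monday through Sunday)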
addFormatToken('H', ['HH', 2], 0, 'hour');
addFormatToken('h', ['hh', 2], 0, function () {
return this.hours() % 12 || 12;
});
function meridiem (token, lowercase) {
addFormatToken(token, 0, 0, function () {
return this.localeData().meridiem(this.hours(), this.minutes(), lowercase);
});
}
meridiem('a', true);
meridiem('A', false);
// ALIASES
addUnitAlias('hour', 'h');
// PARSING
function matchMeridiem (isStrict, locale) {
return locale._meridiemParse;
}
addRegexToken('a', matchMeridiem);
addRegexToken('A', matchMeridiem);
addRegexToken('H', match1to2);
addRegexToken('h', match1to2);
addRegexToken('HH', match1to2, match2);
addRegexToken('hh', match1to2, match2);
addParseToken(['H', 'HH'], HOUR);
addParseToken(['a', 'A'], function (input, array, config) {
config._isPm = config._locale.isPM(input);
config._meridiem = input;
});
addParseToken(['h', 'hh'], function (input, array, config) {
array[HOUR] = toInt(input);
getParsingFlags(config).bigHour = true;
});
// LOCALES
function localeIsPM (input) {
// IE8 Quirks Mode & IE7 Standards Mode do not allow accessing strings like arrays
// Using charAt should be more compatible.
return ((input + '').toLowerCase().charAt(0) === 'p');
}
var defaultLocaleMeridiemParse = /[ap]\.?m?\.?/i;
function localeMeridiem (hours, minutes, isLower) {
if (hours > 11) {
return isLower ? 'pm' : 'PM';
} else {
return isLower ? 'am' : 'AM';
}
}
// MOMENTS
// Setting the hour should keep the time, because the user explicitly
// specified which hour they want. So trying to maintain the same hour (in
// a new timezone) makes sense. Adding/subtracting hours does not follow
// this rule.
var getSetHour = makeGetSet('Hours', true);
addFormatToken('m', ['mm', 2], 0, 'minute');
// ALIASES
addUnitAlias('minute', 'm');
// PARSING
addRegexToken('m', match1to2);
addRegexToken('mm', match1to2, match2);
addParseToken(['m', 'mm'], MINUTE);
// MOMENTS
var getSetMinute = makeGetSet('Minutes', false);
addFormatToken('s', ['ss', 2], 0, 'second');
// ALIASES
addUnitAlias('second', 's');
// PARSING
addRegexToken('s', match1to2);
addRegexToken('ss', match1to2, match2);
addParseToken(['s', 'ss'], SECOND);
// MOMENTS
var getSetSecond = makeGetSet('Seconds', false);
addFormatToken('S', 0, 0, function () {
return ~~(this.millisecond() / 100);
});
addFormatToken(0, ['SS', 2], 0, function () {
return ~~(this.millisecond() / 10);
});
function millisecond__milliseconds (token) {
addFormatToken(0, [token, 3], 0, 'millisecond');
}
millisecond__milliseconds('SSS');
millisecond__milliseconds('SSSS');
// ALIASES
addUnitAlias('millisecond', 'ms');
// PARSING
addRegexToken('S', match1to3, match1);
addRegexToken('SS', match1to3, match2);
addRegexToken('SSS', match1to3, match3);
addRegexToken('SSSS', matchUnsigned);
addParseToken(['S', 'SS', 'SSS', 'SSSS'], function (input, array) {
array[MILLISECOND] = toInt(('0.' + input) * 1000);
});
// MOMENTS
var getSetMillisecond = makeGetSet('Milliseconds', false);
addFormatToken('z', 0, 0, 'zoneAbbr');
addFormatToken('zz', 0, 0, 'zoneName');
// MOMENTS
function getZoneAbbr () {
return this._isUTC ? 'UTC' : '';
}
function getZoneName () {
return this._isUTC ? 'Coordinated Universal Time' : '';
}
var momentPrototype__proto = Moment.prototype;
momentPrototype__proto.add = add_subtract__add;
momentPrototype__proto.calendar = moment_calendar__calendar;
momentPrototype__proto.clone = clone;
momentPrototype__proto.diff = diff;
momentPrototype__proto.endOf = endOf;
momentPrototype__proto.format = format;
momentPrototype__proto.from = from;
momentPrototype__proto.fromNow = fromNow;
momentPrototype__proto.to = to;
momentPrototype__proto.toNow = toNow;
momentPrototype__proto.get = getSet;
momentPrototype__proto.invalidAt = invalidAt;
momentPrototype__proto.isAfter = isAfter;
momentPrototype__proto.isBefore = isBefore;
momentPrototype__proto.isBetween = isBetween;
momentPrototype__proto.isSame = isSame;
momentPrototype__proto.isValid = moment_valid__isValid;
momentPrototype__proto.lang = lang;
momentPrototype__proto.locale = locale;
momentPrototype__proto.localeData = localeData;
momentPrototype__proto.max = prototypeMax;
momentPrototype__proto.min = prototypeMin;
momentPrototype__proto.parsingFlags = parsingFlags;
momentPrototype__proto.set = getSet;
momentPrototype__proto.startOf = startOf;
momentPrototype__proto.subtract = add_subtract__subtract;
momentPrototype__proto.toArray = toArray;
momentPrototype__proto.toDate = toDate;
momentPrototype__proto.toISOString = moment_format__toISOString;
momentPrototype__proto.toJSON = moment_format__toISOString;
momentPrototype__proto.toString = toString;
momentPrototype__proto.unix = unix;
momentPrototype__proto.valueOf = to_type__valueOf;
// Year
momentPrototype__proto.year = getSetYear;
momentPrototype__proto.isLeapYear = getIsLeapYear;
// Week Year
momentPrototype__proto.weekYear = getSetWeekYear;
momentPrototype__proto.isoWeekYear = getSetISOWeekYear;
// Quarter
momentPrototype__proto.quarter = momentPrototype__proto.quarters = getSetQuarter;
// Month
momentPrototype__proto.month = getSetMonth;
momentPrototype__proto.daysInMonth = getDaysInMonth;
// Week
momentPrototype__proto.week = momentPrototype__proto.weeks = getSetWeek;
momentPrototype__proto.isoWeek = momentPrototype__proto.isoWeeks = getSetISOWeek;
momentPrototype__proto.weeksInYear = getWeeksInYear;
momentPrototype__proto.isoWeeksInYear = getISOWeeksInYear;
// Day
momentPrototype__proto.date = getSetDayOfMonth;
momentPrototype__proto.day = momentPrototype__proto.days = getSetDayOfWeek;
momentPrototype__proto.weekday = getSetLocaleDayOfWeek;
momentPrototype__proto.isoWeekday = getSetISODayOfWeek;
momentPrototype__proto.dayOfYear = getSetDayOfYear;
// Hour
momentPrototype__proto.hour = momentPrototype__proto.hours = getSetHour;
// Minute
momentPrototype__proto.minute = momentPrototype__proto.minutes = getSetMinute;
// Second
momentPrototype__proto.second = momentPrototype__proto.seconds = getSetSecond;
// Millisecond
momentPrototype__proto.millisecond = momentPrototype__proto.milliseconds = getSetMillisecond;
// Offset
momentPrototype__proto.utcOffset = getSetOffset;
momentPrototype__proto.utc = setOffsetToUTC;
momentPrototype__proto.local = setOffsetToLocal;
momentPrototype__proto.parseZone = setOffsetToParsedOffset;
momentPrototype__proto.hasAlignedHourOffset = hasAlignedHourOffset;
momentPrototype__proto.isDST = isDaylightSavingTime;
momentPrototype__proto.isDSTShifted = isDaylightSavingTimeShifted;
momentPrototype__proto.isLocal = isLocal;
momentPrototype__proto.isUtcOffset = isUtcOffset;
momentPrototype__proto.isUtc = isUtc;
momentPrototype__proto.isUTC = isUtc;
// Timezone
momentPrototype__proto.zoneAbbr = getZoneAbbr;
momentPrototype__proto.zoneName = getZoneName;
// Deprecations
momentPrototype__proto.dates = deprecate('dates accessor is deprecated. Use date instead.', getSetDayOfMonth);
momentPrototype__proto.months = deprecate('months accessor is deprecated. Use month instead', getSetMonth);
momentPrototype__proto.years = deprecate('years accessor is deprecated. Use year instead', getSetYear);
momentPrototype__proto.zone = deprecate('moment().zone is deprecated, use moment().utcOffset instead. https://github.com/moment/moment/issues/1779', getSetZone);
var momentPrototype = momentPrototype__proto;
function moment__createUnix (input) {
return local__createLocal(input * 1000);
}
function moment__createInZone () {
return local__createLocal.apply(null, arguments).parseZone();
}
var defaultCalendar = {
sameDay : '[Today at] LT',
nextDay : '[Tomorrow at] LT',
nextWeek : 'dddd [at] LT',
lastDay : '[Yesterday at] LT',
lastWeek : '[Last] dddd [at] LT',
sameElse : 'L'
};
function locale_calendar__calendar (key, mom, now) {
var output = this._calendar[key];
return typeof output === 'function' ? output.call(mom, now) : output;
}
var defaultLongDateFormat = {
LTS : 'h:mm:ss A',
LT : 'h:mm A',
L : 'MM/DD/YYYY',
LL : 'MMMM D, YYYY',
LLL : 'MMMM D, YYYY LT',
LLLL : 'dddd, MMMM D, YYYY LT'
};
function longDateFormat (key) {
var output = this._longDateFormat[key];
if (!output && this._longDateFormat[key.toUpperCase()]) {
output = this._longDateFormat[key.toUpperCase()].replace(/MMMM|MM|DD|dddd/g, function (val) {
return val.slice(1);
});
this._longDateFormat[key] = output;
}
return output;
}
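// Illustrative sketch of the fallback above (hypothetical inputs): given
// only LL = 'MMMM D, YYYY', asking for 'll' derives 'MMM D, YYYY' by
// slicing one character off each matched MMMM/MM/DD/dddd token, then
// caches the result on the locale.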
var defaultInvalidDate = 'Invalid date';
function invalidDate () {
return this._invalidDate;
}
var defaultOrdinal = '%d';
var defaultOrdinalParse = /\d{1,2}/;
function ordinal (number) {
return this._ordinal.replace('%d', number);
}
function preParsePostFormat (string) {
return string;
}
var defaultRelativeTime = {
future : 'in %s',
past : '%s ago',
s : 'a few seconds',
m : 'a minute',
mm : '%d minutes',
h : 'an hour',
hh : '%d hours',
d : 'a day',
dd : '%d days',
M : 'a month',
MM : '%d months',
y : 'a year',
yy : '%d years'
};
function relative__relativeTime (number, withoutSuffix, string, isFuture) {
var output = this._relativeTime[string];
return (typeof output === 'function') ?
output(number, withoutSuffix, string, isFuture) :
output.replace(/%d/i, number);
}
function pastFuture (diff, output) {
var format = this._relativeTime[diff > 0 ? 'future' : 'past'];
return typeof format === 'function' ? format(output) : format.replace(/%s/i, output);
}
function locale_set__set (config) {
var prop, i;
for (i in config) {
prop = config[i];
if (typeof prop === 'function') {
this[i] = prop;
} else {
this['_' + i] = prop;
}
}
// Lenient ordinal parsing accepts a bare 1-2 digit number in addition
// to whatever _ordinalParse itself matches.
this._ordinalParseLenient = new RegExp(this._ordinalParse.source + '|' + (/\d{1,2}/).source);
}
var prototype__proto = Locale.prototype;
prototype__proto._calendar = defaultCalendar;
prototype__proto.calendar = locale_calendar__calendar;
prototype__proto._longDateFormat = defaultLongDateFormat;
prototype__proto.longDateFormat = longDateFormat;
prototype__proto._invalidDate = defaultInvalidDate;
prototype__proto.invalidDate = invalidDate;
prototype__proto._ordinal = defaultOrdinal;
prototype__proto.ordinal = ordinal;
prototype__proto._ordinalParse = defaultOrdinalParse;
prototype__proto.preparse = preParsePostFormat;
prototype__proto.postformat = preParsePostFormat;
prototype__proto._relativeTime = defaultRelativeTime;
prototype__proto.relativeTime = relative__relativeTime;
prototype__proto.pastFuture = pastFuture;
prototype__proto.set = locale_set__set;
// Month
prototype__proto.months = localeMonths;
prototype__proto._months = defaultLocaleMonths;
prototype__proto.monthsShort = localeMonthsShort;
prototype__proto._monthsShort = defaultLocaleMonthsShort;
prototype__proto.monthsParse = localeMonthsParse;
// Week
prototype__proto.week = localeWeek;
prototype__proto._week = defaultLocaleWeek;
prototype__proto.firstDayOfYear = localeFirstDayOfYear;
prototype__proto.firstDayOfWeek = localeFirstDayOfWeek;
// Day of Week
prototype__proto.weekdays = localeWeekdays;
prototype__proto._weekdays = defaultLocaleWeekdays;
prototype__proto.weekdaysMin = localeWeekdaysMin;
prototype__proto._weekdaysMin = defaultLocaleWeekdaysMin;
prototype__proto.weekdaysShort = localeWeekdaysShort;
prototype__proto._weekdaysShort = defaultLocaleWeekdaysShort;
prototype__proto.weekdaysParse = localeWeekdaysParse;
// Hours
prototype__proto.isPM = localeIsPM;
prototype__proto._meridiemParse = defaultLocaleMeridiemParse;
prototype__proto.meridiem = localeMeridiem;
function lists__get (format, index, field, setter) {
var locale = locale_locales__getLocale();
var utc = create_utc__createUTC().set(setter, index);
return locale[field](utc, format);
}
function list (format, index, field, count, setter) {
if (typeof format === 'number') {
index = format;
format = undefined;
}
format = format || '';
if (index != null) {
return lists__get(format, index, field, setter);
}
var i;
var out = [];
for (i = 0; i < count; i++) {
out[i] = lists__get(format, i, field, setter);
}
return out;
}
function lists__listMonths (format, index) {
return list(format, index, 'months', 12, 'month');
}
function lists__listMonthsShort (format, index) {
return list(format, index, 'monthsShort', 12, 'month');
}
function lists__listWeekdays (format, index) {
return list(format, index, 'weekdays', 7, 'day');
}
function lists__listWeekdaysShort (format, index) {
return list(format, index, 'weekdaysShort', 7, 'day');
}
function lists__listWeekdaysMin (format, index) {
return list(format, index, 'weekdaysMin', 7, 'day');
}
locale_locales__getSetGlobalLocale('en', {
ordinalParse: /\d{1,2}(th|st|nd|rd)/,
ordinal : function (number) {
var b = number % 10,
output = (toInt(number % 100 / 10) === 1) ? 'th' :
(b === 1) ? 'st' :
(b === 2) ? 'nd' :
(b === 3) ? 'rd' : 'th';
return number + output;
}
});
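// Illustrative outputs of the English ordinal above: 1 -> '1st',
// 2 -> '2nd', 3 -> '3rd', 4 -> '4th', 11 -> '11th', 21 -> '21st'.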
// Side effect imports
utils_hooks__hooks.lang = deprecate('moment.lang is deprecated. Use moment.locale instead.', locale_locales__getSetGlobalLocale);
utils_hooks__hooks.langData = deprecate('moment.langData is deprecated. Use moment.localeData instead.', locale_locales__getLocale);
var mathAbs = Math.abs;
function duration_abs__abs () {
var data = this._data;
this._milliseconds = mathAbs(this._milliseconds);
this._days = mathAbs(this._days);
this._months = mathAbs(this._months);
data.milliseconds = mathAbs(data.milliseconds);
data.seconds = mathAbs(data.seconds);
data.minutes = mathAbs(data.minutes);
data.hours = mathAbs(data.hours);
data.months = mathAbs(data.months);
data.years = mathAbs(data.years);
return this;
}
function duration_add_subtract__addSubtract (duration, input, value, direction) {
var other = create__createDuration(input, value);
duration._milliseconds += direction * other._milliseconds;
duration._days += direction * other._days;
duration._months += direction * other._months;
return duration._bubble();
}
// supports only 2.0-style add(1, 's') or add(duration)
function duration_add_subtract__add (input, value) {
return duration_add_subtract__addSubtract(this, input, value, 1);
}
// supports only 2.0-style subtract(1, 's') or subtract(duration)
function duration_add_subtract__subtract (input, value) {
return duration_add_subtract__addSubtract(this, input, value, -1);
}
function bubble () {
var milliseconds = this._milliseconds;
var days = this._days;
var months = this._months;
var data = this._data;
var seconds, minutes, hours, years = 0;
// The following code bubbles up values, see the tests for
// examples of what that means.
data.milliseconds = milliseconds % 1000;
seconds = absFloor(milliseconds / 1000);
data.seconds = seconds % 60;
minutes = absFloor(seconds / 60);
data.minutes = minutes % 60;
hours = absFloor(minutes / 60);
data.hours = hours % 24;
days += absFloor(hours / 24);
// Accurately convert days to years, assume start from year 0.
years = absFloor(daysToYears(days));
days -= absFloor(yearsToDays(years));
// 30 days to a month
// TODO (iskren): Use anchor date (like 1st Jan) to compute this.
months += absFloor(days / 30);
days %= 30;
// 12 months -> 1 year
years += absFloor(months / 12);
months %= 12;
data.days = days;
data.months = months;
data.years = years;
return this;
}
function daysToYears (days) {
// 400 years have 146097 days (taking into account leap year rules)
return days * 400 / 146097;
}
function yearsToDays (years) {
// years * 365 + absFloor(years / 4) -
// absFloor(years / 100) + absFloor(years / 400);
return years * 146097 / 400;
}
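// Sanity check for the two helpers above: the Gregorian calendar repeats
// every 400 years = 146097 days, so daysToYears(146097) === 400 and
// yearsToDays(1) === 365.2425 (the mean Gregorian year length).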
function as (units) {
var days;
var months;
var milliseconds = this._milliseconds;
units = normalizeUnits(units);
if (units === 'month' || units === 'year') {
days = this._days + milliseconds / 864e5;
months = this._months + daysToYears(days) * 12;
return units === 'month' ? months : months / 12;
} else {
// handle milliseconds separately because of floating point math errors (issue #1867)
days = this._days + Math.round(yearsToDays(this._months / 12));
switch (units) {
case 'week' : return days / 7 + milliseconds / 6048e5;
case 'day' : return days + milliseconds / 864e5;
case 'hour' : return days * 24 + milliseconds / 36e5;
case 'minute' : return days * 1440 + milliseconds / 6e4;
case 'second' : return days * 86400 + milliseconds / 1000;
// Math.floor prevents floating point math errors here
case 'millisecond': return Math.floor(days * 864e5) + milliseconds;
default: throw new Error('Unknown unit ' + units);
}
}
}
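// Illustrative example (assumes the public duration API):
//   moment.duration(90, 'minutes').as('hours'); // 1.5
//   moment.duration(36, 'hours').as('days');    // 1.5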
// TODO: Use this.as('ms')?
function duration_as__valueOf () {
return (
this._milliseconds +
this._days * 864e5 +
(this._months % 12) * 2592e6 +
toInt(this._months / 12) * 31536e6
);
}
function makeAs (alias) {
return function () {
return this.as(alias);
};
}
var asMilliseconds = makeAs('ms');
var asSeconds = makeAs('s');
var asMinutes = makeAs('m');
var asHours = makeAs('h');
var asDays = makeAs('d');
var asWeeks = makeAs('w');
var asMonths = makeAs('M');
var asYears = makeAs('y');
function duration_get__get (units) {
units = normalizeUnits(units);
return this[units + 's']();
}
function makeGetter(name) {
return function () {
return this._data[name];
};
}
var duration_get__milliseconds = makeGetter('milliseconds');
var seconds = makeGetter('seconds');
var minutes = makeGetter('minutes');
var hours = makeGetter('hours');
var days = makeGetter('days');
var months = makeGetter('months');
var years = makeGetter('years');
function weeks () {
return absFloor(this.days() / 7);
}
var round = Math.round;
var thresholds = {
s: 45, // seconds to minute
m: 45, // minutes to hour
h: 22, // hours to day
d: 26, // days to month
M: 11 // months to year
};
// helper function for moment.fn.from, moment.fn.fromNow, and moment.duration.fn.humanize
function substituteTimeAgo(string, number, withoutSuffix, isFuture, locale) {
return locale.relativeTime(number || 1, !!withoutSuffix, string, isFuture);
}
function duration_humanize__relativeTime (posNegDuration, withoutSuffix, locale) {
var duration = create__createDuration(posNegDuration).abs();
var seconds = round(duration.as('s'));
var minutes = round(duration.as('m'));
var hours = round(duration.as('h'));
var days = round(duration.as('d'));
var months = round(duration.as('M'));
var years = round(duration.as('y'));
var a = seconds < thresholds.s && ['s', seconds] ||
minutes === 1 && ['m'] ||
minutes < thresholds.m && ['mm', minutes] ||
hours === 1 && ['h'] ||
hours < thresholds.h && ['hh', hours] ||
days === 1 && ['d'] ||
days < thresholds.d && ['dd', days] ||
months === 1 && ['M'] ||
months < thresholds.M && ['MM', months] ||
years === 1 && ['y'] || ['yy', years];
a[2] = withoutSuffix;
a[3] = +posNegDuration > 0;
a[4] = locale;
return substituteTimeAgo.apply(null, a);
}
// This function allows you to set a threshold for relative time strings
function duration_humanize__getSetRelativeTimeThreshold (threshold, limit) {
if (thresholds[threshold] === undefined) {
return false;
}
if (limit === undefined) {
return thresholds[threshold];
}
thresholds[threshold] = limit;
return true;
}
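// Illustrative usage (assumes the hook exposed further below as
// moment.relativeTimeThreshold): one argument reads, two arguments write:
//   moment.relativeTimeThreshold('m');     // 45
//   moment.relativeTimeThreshold('m', 60); // true; now '59 minutes'
//                                          // humanizes before 'an hour'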
function humanize (withSuffix) {
var locale = this.localeData();
var output = duration_humanize__relativeTime(this, !withSuffix, locale);
if (withSuffix) {
output = locale.pastFuture(+this, output);
}
return locale.postformat(output);
}
var iso_string__abs = Math.abs;
function iso_string__toISOString() {
// inspired by https://github.com/dordille/moment-isoduration/blob/master/moment.isoduration.js
var Y = iso_string__abs(this.years());
var M = iso_string__abs(this.months());
var D = iso_string__abs(this.days());
var h = iso_string__abs(this.hours());
var m = iso_string__abs(this.minutes());
var s = iso_string__abs(this.seconds() + this.milliseconds() / 1000);
var total = this.asSeconds();
if (!total) {
// this matches C# (Noda Time) and Python (isodate)...
// but not some other JS libraries (goog.date)
return 'P0D';
}
return (total < 0 ? '-' : '') +
'P' +
(Y ? Y + 'Y' : '') +
(M ? M + 'M' : '') +
(D ? D + 'D' : '') +
((h || m || s) ? 'T' : '') +
(h ? h + 'H' : '') +
(m ? m + 'M' : '') +
(s ? s + 'S' : '');
}
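// Illustrative outputs (assumes the public duration API):
//   moment.duration({ days: 1, hours: 2 }).toISOString(); // 'P1DT2H'
//   moment.duration(-90, 'seconds').toISOString();        // '-PT1M30S'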
var duration_prototype__proto = Duration.prototype;
duration_prototype__proto.abs = duration_abs__abs;
duration_prototype__proto.add = duration_add_subtract__add;
duration_prototype__proto.subtract = duration_add_subtract__subtract;
duration_prototype__proto.as = as;
duration_prototype__proto.asMilliseconds = asMilliseconds;
duration_prototype__proto.asSeconds = asSeconds;
duration_prototype__proto.asMinutes = asMinutes;
duration_prototype__proto.asHours = asHours;
duration_prototype__proto.asDays = asDays;
duration_prototype__proto.asWeeks = asWeeks;
duration_prototype__proto.asMonths = asMonths;
duration_prototype__proto.asYears = asYears;
duration_prototype__proto.valueOf = duration_as__valueOf;
duration_prototype__proto._bubble = bubble;
duration_prototype__proto.get = duration_get__get;
duration_prototype__proto.milliseconds = duration_get__milliseconds;
duration_prototype__proto.seconds = seconds;
duration_prototype__proto.minutes = minutes;
duration_prototype__proto.hours = hours;
duration_prototype__proto.days = days;
duration_prototype__proto.weeks = weeks;
duration_prototype__proto.months = months;
duration_prototype__proto.years = years;
duration_prototype__proto.humanize = humanize;
duration_prototype__proto.toISOString = iso_string__toISOString;
duration_prototype__proto.toString = iso_string__toISOString;
duration_prototype__proto.toJSON = iso_string__toISOString;
duration_prototype__proto.locale = locale;
duration_prototype__proto.localeData = localeData;
// Deprecations
duration_prototype__proto.toIsoString = deprecate('toIsoString() is deprecated. Please use toISOString() instead (notice the capitals)', iso_string__toISOString);
duration_prototype__proto.lang = lang;
// Side effect imports
addFormatToken('X', 0, 0, 'unix');
addFormatToken('x', 0, 0, 'valueOf');
// PARSING
addRegexToken('x', matchSigned);
addRegexToken('X', matchTimestamp);
addParseToken('X', function (input, array, config) {
config._d = new Date(parseFloat(input) * 1000);
});
addParseToken('x', function (input, array, config) {
config._d = new Date(toInt(input));
});
// Side effect imports
utils_hooks__hooks.version = '2.10.3';
setHookCallback(local__createLocal);
utils_hooks__hooks.fn = momentPrototype;
utils_hooks__hooks.min = min;
utils_hooks__hooks.max = max;
utils_hooks__hooks.utc = create_utc__createUTC;
utils_hooks__hooks.unix = moment__createUnix;
utils_hooks__hooks.months = lists__listMonths;
utils_hooks__hooks.isDate = isDate;
utils_hooks__hooks.locale = locale_locales__getSetGlobalLocale;
utils_hooks__hooks.invalid = valid__createInvalid;
utils_hooks__hooks.duration = create__createDuration;
utils_hooks__hooks.isMoment = isMoment;
utils_hooks__hooks.weekdays = lists__listWeekdays;
utils_hooks__hooks.parseZone = moment__createInZone;
utils_hooks__hooks.localeData = locale_locales__getLocale;
utils_hooks__hooks.isDuration = isDuration;
utils_hooks__hooks.monthsShort = lists__listMonthsShort;
utils_hooks__hooks.weekdaysMin = lists__listWeekdaysMin;
utils_hooks__hooks.defineLocale = defineLocale;
utils_hooks__hooks.weekdaysShort = lists__listWeekdaysShort;
utils_hooks__hooks.normalizeUnits = normalizeUnits;
utils_hooks__hooks.relativeTimeThreshold = duration_humanize__getSetRelativeTimeThreshold;
var _moment = utils_hooks__hooks;
return _moment;
}));
},{}],"numeral":[function(require,module,exports){
/*!
* numeral.js
* version : 1.5.3
* author : Adam Draper
* license : MIT
* http://adamwdraper.github.com/Numeral-js/
*/
(function () {
/************************************
Constants
************************************/
var numeral,
VERSION = '1.5.3',
// internal storage for language config files
languages = {},
currentLanguage = 'en',
zeroFormat = null,
defaultFormat = '0,0',
// check for nodeJS
hasModule = (typeof module !== 'undefined' && module.exports);
/************************************
Constructors
************************************/
// Numeral prototype object
function Numeral (number) {
this._value = number;
}
/**
* Implementation of toFixed() that treats floats more like decimals
*
* Fixes binary rounding issues (eg. (0.615).toFixed(2) === '0.61') that present
* problems for accounting- and finance-related software.
*/
function toFixed (value, precision, roundingFunction, optionals) {
var power = Math.pow(10, precision),
optionalsRegExp,
output;
//roundingFunction = (roundingFunction !== undefined ? roundingFunction : Math.round);
// Multiply up by precision, round accurately, then divide and use native toFixed():
output = (roundingFunction(value * power) / power).toFixed(precision);
if (optionals) {
optionalsRegExp = new RegExp('0{1,' + optionals + '}$');
output = output.replace(optionalsRegExp, '');
}
return output;
}
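// Illustrative comparison (hypothetical values): native rounding drops
// the half step on binary-unrepresentable inputs, the helper keeps it:
//   (0.615).toFixed(2);            // '0.61' in most engines
//   toFixed(0.615, 2, Math.round); // '0.62'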
/************************************
Formatting
************************************/
// determine what type of formatting we need to do
function formatNumeral (n, format, roundingFunction) {
var output;
// figure out what kind of format we are dealing with
if (format.indexOf('$') > -1) { // currency!!!!!
output = formatCurrency(n, format, roundingFunction);
} else if (format.indexOf('%') > -1) { // percentage
output = formatPercentage(n, format, roundingFunction);
} else if (format.indexOf(':') > -1) { // time
output = formatTime(n, format);
} else { // plain ol' numbers or bytes
output = formatNumber(n._value, format, roundingFunction);
}
// return string
return output;
}
// revert to number
function unformatNumeral (n, string) {
var stringOriginal = string,
thousandRegExp,
millionRegExp,
billionRegExp,
trillionRegExp,
suffixes = ['KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'],
bytesMultiplier = false,
power;
if (string.indexOf(':') > -1) {
n._value = unformatTime(string);
} else {
if (string === zeroFormat) {
n._value = 0;
} else {
if (languages[currentLanguage].delimiters.decimal !== '.') {
string = string.replace(/\./g,'').replace(languages[currentLanguage].delimiters.decimal, '.');
}
// see if abbreviations are there so that we can multiply to the correct number
thousandRegExp = new RegExp('[^a-zA-Z]' + languages[currentLanguage].abbreviations.thousand + '(?:\\)|(\\' + languages[currentLanguage].currency.symbol + ')?(?:\\))?)?$');
millionRegExp = new RegExp('[^a-zA-Z]' + languages[currentLanguage].abbreviations.million + '(?:\\)|(\\' + languages[currentLanguage].currency.symbol + ')?(?:\\))?)?$');
billionRegExp = new RegExp('[^a-zA-Z]' + languages[currentLanguage].abbreviations.billion + '(?:\\)|(\\' + languages[currentLanguage].currency.symbol + ')?(?:\\))?)?$');
trillionRegExp = new RegExp('[^a-zA-Z]' + languages[currentLanguage].abbreviations.trillion + '(?:\\)|(\\' + languages[currentLanguage].currency.symbol + ')?(?:\\))?)?$');
// see if bytes are there so that we can multiply to the correct number
for (power = 0; power < suffixes.length; power++) {
bytesMultiplier = (string.indexOf(suffixes[power]) > -1) ? Math.pow(1024, power + 1) : false;
if (bytesMultiplier) {
break;
}
}
// do some math to create our number
n._value = ((bytesMultiplier) ? bytesMultiplier : 1) * ((stringOriginal.match(thousandRegExp)) ? Math.pow(10, 3) : 1) * ((stringOriginal.match(millionRegExp)) ? Math.pow(10, 6) : 1) * ((stringOriginal.match(billionRegExp)) ? Math.pow(10, 9) : 1) * ((stringOriginal.match(trillionRegExp)) ? Math.pow(10, 12) : 1) * ((string.indexOf('%') > -1) ? 0.01 : 1) * (((string.split('-').length + Math.min(string.split('(').length-1, string.split(')').length-1)) % 2)? 1: -1) * Number(string.replace(/[^0-9\.]+/g, ''));
// round if we are talking about bytes
n._value = (bytesMultiplier) ? Math.ceil(n._value) : n._value;
}
}
return n._value;
}
function formatCurrency (n, format, roundingFunction) {
var symbolIndex = format.indexOf('$'),
openParenIndex = format.indexOf('('),
minusSignIndex = format.indexOf('-'),
space = '',
spliceIndex,
output;
// check for space before or after currency
if (format.indexOf(' $') > -1) {
space = ' ';
format = format.replace(' $', '');
} else if (format.indexOf('$ ') > -1) {
space = ' ';
format = format.replace('$ ', '');
} else {
format = format.replace('$', '');
}
// format the number
output = formatNumber(n._value, format, roundingFunction);
// position the symbol
if (symbolIndex <= 1) {
if (output.indexOf('(') > -1 || output.indexOf('-') > -1) {
output = output.split('');
spliceIndex = 1;
if (symbolIndex < openParenIndex || symbolIndex < minusSignIndex){
// the symbol appears before the "(" or "-"
spliceIndex = 0;
}
output.splice(spliceIndex, 0, languages[currentLanguage].currency.symbol + space);
output = output.join('');
} else {
output = languages[currentLanguage].currency.symbol + space + output;
}
} else {
if (output.indexOf(')') > -1) {
output = output.split('');
output.splice(-1, 0, space + languages[currentLanguage].currency.symbol);
output = output.join('');
} else {
output = output + space + languages[currentLanguage].currency.symbol;
}
}
return output;
}
function formatPercentage (n, format, roundingFunction) {
var space = '',
output,
value = n._value * 100;
// check for space before %
if (format.indexOf(' %') > -1) {
space = ' ';
format = format.replace(' %', '');
} else {
format = format.replace('%', '');
}
output = formatNumber(value, format, roundingFunction);
if (output.indexOf(')') > -1 ) {
output = output.split('');
output.splice(-1, 0, space + '%');
output = output.join('');
} else {
output = output + space + '%';
}
return output;
}
function formatTime (n) {
var hours = Math.floor(n._value/60/60),
minutes = Math.floor((n._value - (hours * 60 * 60))/60),
seconds = Math.round(n._value - (hours * 60 * 60) - (minutes * 60));
return hours + ':' + ((minutes < 10) ? '0' + minutes : minutes) + ':' + ((seconds < 10) ? '0' + seconds : seconds);
}
function unformatTime (string) {
var timeArray = string.split(':'),
seconds = 0;
// turn hours and minutes into seconds and add them all up
if (timeArray.length === 3) {
// hours
seconds = seconds + (Number(timeArray[0]) * 60 * 60);
// minutes
seconds = seconds + (Number(timeArray[1]) * 60);
// seconds
seconds = seconds + Number(timeArray[2]);
} else if (timeArray.length === 2) {
// minutes
seconds = seconds + (Number(timeArray[0]) * 60);
// seconds
seconds = seconds + Number(timeArray[1]);
}
return Number(seconds);
}
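// Round-trip sketch for the two internal helpers above (illustrative):
//   formatTime(numeral(5025)); // '1:23:45'  (1h 23m 45s)
//   unformatTime('1:23:45');   // 5025 seconds
// or via the public API: numeral(5025).format('0:00:00') -> '1:23:45'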
function formatNumber (value, format, roundingFunction) {
var negP = false,
signed = false,
optDec = false,
abbr = '',
abbrK = false, // force abbreviation to thousands
abbrM = false, // force abbreviation to millions
abbrB = false, // force abbreviation to billions
abbrT = false, // force abbreviation to trillions
abbrForce = false, // force abbreviation
bytes = '',
ord = '',
abs = Math.abs(value),
suffixes = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'],
min,
max,
power,
w,
precision,
thousands,
d = '',
neg = false;
// check if number is zero and a custom zero format has been set
if (value === 0 && zeroFormat !== null) {
return zeroFormat;
} else {
// see if we should use parentheses for a negative number or prefix it
// with a sign; if both are present we default to parentheses
if (format.indexOf('(') > -1) {
negP = true;
format = format.slice(1, -1);
} else if (format.indexOf('+') > -1) {
signed = true;
format = format.replace(/\+/g, '');
}
// see if abbreviation is wanted
if (format.indexOf('a') > -1) {
// check if abbreviation is specified
abbrK = format.indexOf('aK') >= 0;
abbrM = format.indexOf('aM') >= 0;
abbrB = format.indexOf('aB') >= 0;
abbrT = format.indexOf('aT') >= 0;
abbrForce = abbrK || abbrM || abbrB || abbrT;
// check for space before abbreviation
if (format.indexOf(' a') > -1) {
abbr = ' ';
format = format.replace(' a', '');
} else {
format = format.replace('a', '');
}
if (abs >= Math.pow(10, 12) && !abbrForce || abbrT) {
// trillion
abbr = abbr + languages[currentLanguage].abbreviations.trillion;
value = value / Math.pow(10, 12);
} else if (abs < Math.pow(10, 12) && abs >= Math.pow(10, 9) && !abbrForce || abbrB) {
// billion
abbr = abbr + languages[currentLanguage].abbreviations.billion;
value = value / Math.pow(10, 9);
} else if (abs < Math.pow(10, 9) && abs >= Math.pow(10, 6) && !abbrForce || abbrM) {
// million
abbr = abbr + languages[currentLanguage].abbreviations.million;
value = value / Math.pow(10, 6);
} else if (abs < Math.pow(10, 6) && abs >= Math.pow(10, 3) && !abbrForce || abbrK) {
// thousand
abbr = abbr + languages[currentLanguage].abbreviations.thousand;
value = value / Math.pow(10, 3);
}
}
// see if we are formatting bytes
if (format.indexOf('b') > -1) {
// check for space before
if (format.indexOf(' b') > -1) {
bytes = ' ';
format = format.replace(' b', '');
} else {
format = format.replace('b', '');
}
for (power = 0; power < suffixes.length; power++) {
min = Math.pow(1024, power);
max = Math.pow(1024, power+1);
if (value >= min && value < max) {
bytes = bytes + suffixes[power];
if (min > 0) {
value = value / min;
}
break;
}
}
}
// see if ordinal is wanted
if (format.indexOf('o') > -1) {
// check for space before
if (format.indexOf(' o') > -1) {
ord = ' ';
format = format.replace(' o', '');
} else {
format = format.replace('o', '');
}
ord = ord + languages[currentLanguage].ordinal(value);
}
if (format.indexOf('[.]') > -1) {
optDec = true;
format = format.replace('[.]', '.');
}
w = value.toString().split('.')[0];
precision = format.split('.')[1];
thousands = format.indexOf(',');
if (precision) {
if (precision.indexOf('[') > -1) {
precision = precision.replace(']', '');
precision = precision.split('[');
d = toFixed(value, (precision[0].length + precision[1].length), roundingFunction, precision[1].length);
} else {
d = toFixed(value, precision.length, roundingFunction);
}
w = d.split('.')[0];
if (d.split('.')[1].length) {
d = languages[currentLanguage].delimiters.decimal + d.split('.')[1];
} else {
d = '';
}
if (optDec && Number(d.slice(1)) === 0) {
d = '';
}
} else {
w = toFixed(value, null, roundingFunction);
}
// format number
if (w.indexOf('-') > -1) {
w = w.slice(1);
neg = true;
}
if (thousands > -1) {
w = w.toString().replace(/(\d)(?=(\d{3})+(?!\d))/g, '$1' + languages[currentLanguage].delimiters.thousands);
}
if (format.indexOf('.') === 0) {
w = '';
}
return ((negP && neg) ? '(' : '') + ((!negP && neg) ? '-' : '') + ((!neg && signed) ? '+' : '') + w + d + ((ord) ? ord : '') + ((abbr) ? abbr : '') + ((bytes) ? bytes : '') + ((negP && neg) ? ')' : '');
}
}
/************************************
Top Level Functions
************************************/
numeral = function (input) {
if (numeral.isNumeral(input)) {
input = input.value();
} else if (input === 0 || typeof input === 'undefined') {
input = 0;
} else if (!Number(input)) {
input = numeral.fn.unformat(input);
}
return new Numeral(Number(input));
};
// version number
numeral.version = VERSION;
// compare numeral object
numeral.isNumeral = function (obj) {
return obj instanceof Numeral;
};
// This function will load languages and then set the global language. If
// no arguments are passed in, it will simply return the current global
// language key.
numeral.language = function (key, values) {
if (!key) {
return currentLanguage;
}
if (key && !values) {
if(!languages[key]) {
throw new Error('Unknown language : ' + key);
}
currentLanguage = key;
}
if (values || !languages[key]) {
loadLanguage(key, values);
}
return numeral;
};
// This function provides access to the loaded language data. If
// no arguments are passed in, it will simply return the current
// global language object.
numeral.languageData = function (key) {
if (!key) {
return languages[currentLanguage];
}
if (!languages[key]) {
throw new Error('Unknown language : ' + key);
}
return languages[key];
};
numeral.language('en', {
delimiters: {
thousands: ',',
decimal: '.'
},
abbreviations: {
thousand: 'k',
million: 'm',
billion: 'b',
trillion: 't'
},
ordinal: function (number) {
var b = number % 10;
return (~~ (number % 100 / 10) === 1) ? 'th' :
(b === 1) ? 'st' :
(b === 2) ? 'nd' :
(b === 3) ? 'rd' : 'th';
},
currency: {
symbol: '$'
}
});
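// Illustrative usage of the default 'en' language registered above
// (assumes the public numeral API):
//   numeral(1234.56).format('$0,0.00'); // '$1,234.56'
//   numeral(1000).format('0a');         // '1k'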
numeral.zeroFormat = function (format) {
zeroFormat = typeof(format) === 'string' ? format : null;
};
numeral.defaultFormat = function (format) {
defaultFormat = typeof(format) === 'string' ? format : '0,0';
};
/************************************
Helpers
************************************/
function loadLanguage(key, values) {
languages[key] = values;
}
/************************************
Floating-point helpers
************************************/
// The floating-point helper functions and implementation
// borrows heavily from sinful.js: http://guipn.github.io/sinful.js/
/**
* Array.prototype.reduce for browsers that don't support it
* https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/Reduce#Compatibility
*/
if ('function' !== typeof Array.prototype.reduce) {
Array.prototype.reduce = function (callback, opt_initialValue) {
'use strict';
if (null === this || 'undefined' === typeof this) {
// All modern browsers that support strict mode ship a native
// implementation of Array.prototype.reduce; IE8, for instance, does
// not support strict mode, so this check is effectively unreachable.
throw new TypeError('Array.prototype.reduce called on null or undefined');
}
if ('function' !== typeof callback) {
throw new TypeError(callback + ' is not a function');
}
var index,
value,
length = this.length >>> 0,
isValueSet = false;
if (1 < arguments.length) {
value = opt_initialValue;
isValueSet = true;
}
for (index = 0; length > index; ++index) {
if (this.hasOwnProperty(index)) {
if (isValueSet) {
value = callback(value, this[index], index, this);
} else {
value = this[index];
isValueSet = true;
}
}
}
if (!isValueSet) {
throw new TypeError('Reduce of empty array with no initial value');
}
return value;
};
}
/**
* Computes the multiplier necessary to make x >= 1,
* effectively eliminating miscalculations caused by
* finite precision.
*/
function multiplier(x) {
var parts = x.toString().split('.');
if (parts.length < 2) {
return 1;
}
return Math.pow(10, parts[1].length);
}
/**
* Given a variable number of arguments, returns the maximum
* multiplier that must be used to normalize an operation involving
* all of them.
*/
function correctionFactor() {
var args = Array.prototype.slice.call(arguments);
return args.reduce(function (prev, next) {
var mp = multiplier(prev),
mn = multiplier(next);
return mp > mn ? mp : mn;
}, -Infinity);
}
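// Worked example of the two helpers above (illustrative): 0.1 + 0.2 is
// normalized through correctionFactor(0.1, 0.2) === 10 (each operand
// has one decimal digit, so multiplier() returns 10), giving
//   numeral(0.1).add(0.2).value(); // 0.3, not 0.30000000000000004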
/************************************
Numeral Prototype
************************************/
numeral.fn = Numeral.prototype = {
clone : function () {
return numeral(this);
},
format : function (inputString, roundingFunction) {
return formatNumeral(this,
inputString ? inputString : defaultFormat,
(roundingFunction !== undefined) ? roundingFunction : Math.round
);
},
unformat : function (inputString) {
if (Object.prototype.toString.call(inputString) === '[object Number]') {
return inputString;
}
return unformatNumeral(this, inputString ? inputString : defaultFormat);
},
value : function () {
return this._value;
},
valueOf : function () {
return this._value;
},
set : function (value) {
this._value = Number(value);
return this;
},
add : function (value) {
var corrFactor = correctionFactor.call(null, this._value, value);
function cback(accum, curr, currI, O) {
return accum + corrFactor * curr;
}
this._value = [this._value, value].reduce(cback, 0) / corrFactor;
return this;
},
subtract : function (value) {
var corrFactor = correctionFactor.call(null, this._value, value);
function cback(accum, curr, currI, O) {
return accum - corrFactor * curr;
}
this._value = [value].reduce(cback, this._value * corrFactor) / corrFactor;
return this;
},
multiply : function (value) {
function cback(accum, curr, currI, O) {
var corrFactor = correctionFactor(accum, curr);
return (accum * corrFactor) * (curr * corrFactor) /
(corrFactor * corrFactor);
}
this._value = [this._value, value].reduce(cback, 1);
return this;
},
divide : function (value) {
function cback(accum, curr, currI, O) {
var corrFactor = correctionFactor(accum, curr);
return (accum * corrFactor) / (curr * corrFactor);
}
this._value = [this._value, value].reduce(cback);
return this;
},
difference : function (value) {
return Math.abs(numeral(this._value).subtract(value).value());
}
};
/************************************
Exposing Numeral
************************************/
// CommonJS module is defined
if (hasModule) {
module.exports = numeral;
}
/*global ender:false */
if (typeof ender === 'undefined') {
// here, `this` means `window` in the browser, or `global` on the server
// add `numeral` as a global object via a string identifier,
// for Closure Compiler 'advanced' mode
this['numeral'] = numeral;
}
/*global define:false */
if (typeof define === 'function' && define.amd) {
define([], function () {
return numeral;
});
}
}).call(this);
},{}],"pikaday":[function(require,module,exports){
/*!
* Pikaday
*
* Copyright © 2014 David Bushell | BSD & MIT license | https://github.com/dbushell/Pikaday
*/
(function (root, factory)
{
'use strict';
var moment;
if (typeof exports === 'object') {
// CommonJS module
// Load moment.js as an optional dependency
try { moment = require('moment'); } catch (e) {}
module.exports = factory(moment);
} else if (typeof define === 'function' && define.amd) {
// AMD. Register as an anonymous module.
define(function (req)
{
// Load moment.js as an optional dependency
var id = 'moment';
try { moment = req(id); } catch (e) {}
return factory(moment);
});
} else {
root.Pikaday = factory(root.moment);
}
}(this, function (moment)
{
'use strict';
/**
* feature detection and helper functions
*/
var hasMoment = typeof moment === 'function',
hasEventListeners = !!window.addEventListener,
document = window.document,
sto = window.setTimeout,
addEvent = function(el, e, callback, capture)
{
if (hasEventListeners) {
el.addEventListener(e, callback, !!capture);
} else {
el.attachEvent('on' + e, callback);
}
},
removeEvent = function(el, e, callback, capture)
{
if (hasEventListeners) {
el.removeEventListener(e, callback, !!capture);
} else {
el.detachEvent('on' + e, callback);
}
},
fireEvent = function(el, eventName, data)
{
var ev;
if (document.createEvent) {
ev = document.createEvent('HTMLEvents');
ev.initEvent(eventName, true, false);
ev = extend(ev, data);
el.dispatchEvent(ev);
} else if (document.createEventObject) {
ev = document.createEventObject();
ev = extend(ev, data);
el.fireEvent('on' + eventName, ev);
}
},
trim = function(str)
{
return str.trim ? str.trim() : str.replace(/^\s+|\s+$/g,'');
},
hasClass = function(el, cn)
{
return (' ' + el.className + ' ').indexOf(' ' + cn + ' ') !== -1;
},
addClass = function(el, cn)
{
if (!hasClass(el, cn)) {
el.className = (el.className === '') ? cn : el.className + ' ' + cn;
}
},
removeClass = function(el, cn)
{
el.className = trim((' ' + el.className + ' ').replace(' ' + cn + ' ', ' '));
},
isArray = function(obj)
{
return (/Array/).test(Object.prototype.toString.call(obj));
},
isDate = function(obj)
{
return (/Date/).test(Object.prototype.toString.call(obj)) && !isNaN(obj.getTime());
},
isWeekend = function(date)
{
var day = date.getDay();
return day === 0 || day === 6;
},
isLeapYear = function(year)
{
// solution by Matti Virkkunen: http://stackoverflow.com/a/4881951
return year % 4 === 0 && year % 100 !== 0 || year % 400 === 0;
},
getDaysInMonth = function(year, month)
{
return [31, isLeapYear(year) ? 29 : 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31][month];
},
setToStartOfDay = function(date)
{
if (isDate(date)) date.setHours(0,0,0,0);
},
compareDates = function(a,b)
{
// weak date comparison (use setToStartOfDay(date) to ensure correct result)
return a.getTime() === b.getTime();
},
extend = function(to, from, overwrite)
{
var prop, hasProp;
for (prop in from) {
hasProp = to[prop] !== undefined;
if (hasProp && typeof from[prop] === 'object' && from[prop] !== null && from[prop].nodeName === undefined) {
if (isDate(from[prop])) {
if (overwrite) {
to[prop] = new Date(from[prop].getTime());
}
}
else if (isArray(from[prop])) {
if (overwrite) {
to[prop] = from[prop].slice(0);
}
} else {
to[prop] = extend({}, from[prop], overwrite);
}
} else if (overwrite || !hasProp) {
to[prop] = from[prop];
}
}
return to;
},
adjustCalendar = function(calendar) {
if (calendar.month < 0) {
calendar.year -= Math.ceil(Math.abs(calendar.month)/12);
calendar.month += 12;
}
if (calendar.month > 11) {
calendar.year += Math.floor(Math.abs(calendar.month)/12);
calendar.month -= 12;
}
return calendar;
},
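// Illustrative normalization performed by adjustCalendar above:
//   adjustCalendar({ year: 2015, month: 13 }); // { year: 2016, month: 1 }
//   adjustCalendar({ year: 2015, month: -1 }); // { year: 2014, month: 11 }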
/**
* defaults and localisation
*/
defaults = {
// bind the picker to a form field
field: null,
// automatically show/hide the picker on `field` focus (default `true` if `field` is set)
bound: undefined,
// position of the datepicker, relative to the field (default to bottom & left)
// ('bottom' & 'left' keywords are not used, 'top' & 'right' are modifier on the bottom/left position)
position: 'bottom left',
// automatically fit in the viewport even if it means repositioning from the position option
reposition: true,
// the default output format for `.toString()` and `field` value
format: 'YYYY-MM-DD',
// the initial date to view when first opened
defaultDate: null,
// make the `defaultDate` the initial selected value
setDefaultDate: false,
// first day of week (0: Sunday, 1: Monday etc)
firstDay: 0,
// the minimum/earliest date that can be selected
minDate: null,
// the maximum/latest date that can be selected
maxDate: null,
// number of years either side, or array of upper/lower range
yearRange: 10,
// show week numbers at head of row
showWeekNumber: false,
// used internally (do not configure from outside)
minYear: 0,
maxYear: 9999,
minMonth: undefined,
maxMonth: undefined,
isRTL: false,
// Additional text to append to the year in the calendar title
yearSuffix: '',
// Render the month after year in the calendar title
showMonthAfterYear: false,
// how many months are visible
numberOfMonths: 1,
// when numberOfMonths is used, this will help you to choose where the main calendar will be (default `left`, can be set to `right`)
// only used for the first display or when a selected date is not visible
mainCalendar: 'left',
// Specify a DOM element to render the calendar in
container: undefined,
// internationalization
i18n: {
previousMonth : 'Previous Month',
nextMonth : 'Next Month',
months : ['January','February','March','April','May','June','July','August','September','October','November','December'],
weekdays : ['Sunday','Monday','Tuesday','Wednesday','Thursday','Friday','Saturday'],
weekdaysShort : ['Sun','Mon','Tue','Wed','Thu','Fri','Sat']
},
// callback function
onSelect: null,
onOpen: null,
onClose: null,
onDraw: null
},
/**
* templating functions to abstract HTML rendering
*/
renderDayName = function(opts, day, abbr)
{
day += opts.firstDay;
while (day >= 7) {
day -= 7;
}
return abbr ? opts.i18n.weekdaysShort[day] : opts.i18n.weekdays[day];
},
renderDay = function(d, m, y, isSelected, isToday, isDisabled, isEmpty)
{
if (isEmpty) {
return '<td class="is-empty"></td>';
}
var arr = [];
if (isDisabled) {
arr.push('is-disabled');
}
if (isToday) {
arr.push('is-today');
}
if (isSelected) {
arr.push('is-selected');
}
return '<td data-day="' + d + '" class="' + arr.join(' ') + '">' +
'<button class="pika-button pika-day" type="button" ' +
'data-pika-year="' + y + '" data-pika-month="' + m + '" data-pika-day="' + d + '">' +
d +
'</button>' +
'</td>';
},
renderWeek = function (d, m, y) {
// Lifted from http://javascript.about.com/library/blweekyear.htm, lightly modified.
var onejan = new Date(y, 0, 1),
weekNum = Math.ceil((((new Date(y, m, d) - onejan) / 86400000) + onejan.getDay()+1)/7);
return '<td class="pika-week">' + weekNum + '</td>';
},
renderRow = function(days, isRTL)
{
return '<tr>' + (isRTL ? days.reverse() : days).join('') + '</tr>';
},
renderBody = function(rows)
{
return '<tbody>' + rows.join('') + '</tbody>';
},
renderHead = function(opts)
{
var i, arr = [];
if (opts.showWeekNumber) {
arr.push('<th></th>');
}
for (i = 0; i < 7; i++) {
arr.push('<th scope="col"><abbr title="' + renderDayName(opts, i) + '">' + renderDayName(opts, i, true) + '</abbr></th>');
}
return '<thead>' + (opts.isRTL ? arr.reverse() : arr).join('') + '</thead>';
},
renderTitle = function(instance, c, year, month, refYear)
{
var i, j, arr,
opts = instance._o,
isMinYear = year === opts.minYear,
isMaxYear = year === opts.maxYear,
html = '<div class="pika-title">',
monthHtml,
yearHtml,
prev = true,
next = true;
for (arr = [], i = 0; i < 12; i++) {
arr.push('<option value="' + (year === refYear ? i - c : 12 + i - c) + '"' +
(i === month ? ' selected': '') +
((isMinYear && i < opts.minMonth) || (isMaxYear && i > opts.maxMonth) ? ' disabled' : '') + '>' +
opts.i18n.months[i] + '</option>');
}
monthHtml = '<div class="pika-label">' + opts.i18n.months[month] + '<select class="pika-select pika-select-month">' + arr.join('') + '</select></div>';
if (isArray(opts.yearRange)) {
i = opts.yearRange[0];
j = opts.yearRange[1] + 1;
} else {
i = year - opts.yearRange;
j = 1 + year + opts.yearRange;
}
for (arr = []; i < j && i <= opts.maxYear; i++) {
if (i >= opts.minYear) {
arr.push('<option value="' + i + '"' + (i === year ? ' selected': '') + '>' + (i) + '</option>');
}
}
yearHtml = '<div class="pika-label">' + year + opts.yearSuffix + '<select class="pika-select pika-select-year">' + arr.join('') + '</select></div>';
if (opts.showMonthAfterYear) {
html += yearHtml + monthHtml;
} else {
html += monthHtml + yearHtml;
}
if (isMinYear && (month === 0 || opts.minMonth >= month)) {
prev = false;
}
if (isMaxYear && (month === 11 || opts.maxMonth <= month)) {
next = false;
}
if (c === 0) {
html += '<button class="pika-prev' + (prev ? '' : ' is-disabled') + '" type="button">' + opts.i18n.previousMonth + '</button>';
}
if (c === (instance._o.numberOfMonths - 1) ) {
html += '<button class="pika-next' + (next ? '' : ' is-disabled') + '" type="button">' + opts.i18n.nextMonth + '</button>';
}
return html += '</div>';
},
renderTable = function(opts, data)
{
return '<table cellpadding="0" cellspacing="0" class="pika-table">' + renderHead(opts) + renderBody(data) + '</table>';
},
/**
* Pikaday constructor
*/
Pikaday = function(options)
{
var self = this,
opts = self.config(options);
self._onMouseDown = function(e)
{
if (!self._v) {
return;
}
e = e || window.event;
var target = e.target || e.srcElement;
if (!target) {
return;
}
if (!hasClass(target, 'is-disabled')) {
if (hasClass(target, 'pika-button') && !hasClass(target, 'is-empty')) {
self.setDate(new Date(target.getAttribute('data-pika-year'), target.getAttribute('data-pika-month'), target.getAttribute('data-pika-day')));
if (opts.bound) {
sto(function() {
self.hide();
if (opts.field) {
opts.field.blur();
}
}, 100);
}
return;
}
else if (hasClass(target, 'pika-prev')) {
self.prevMonth();
}
else if (hasClass(target, 'pika-next')) {
self.nextMonth();
}
}
if (!hasClass(target, 'pika-select')) {
if (e.preventDefault) {
e.preventDefault();
} else {
e.returnValue = false;
return false;
}
} else {
self._c = true;
}
};
self._onChange = function(e)
{
e = e || window.event;
var target = e.target || e.srcElement;
if (!target) {
return;
}
if (hasClass(target, 'pika-select-month')) {
self.gotoMonth(target.value);
}
else if (hasClass(target, 'pika-select-year')) {
self.gotoYear(target.value);
}
};
self._onInputChange = function(e)
{
var date;
if (e.firedBy === self) {
return;
}
if (hasMoment) {
date = moment(opts.field.value, opts.format);
date = (date && date.isValid()) ? date.toDate() : null;
}
else {
date = new Date(Date.parse(opts.field.value));
}
self.setDate(isDate(date) ? date : null);
if (!self._v) {
self.show();
}
};
self._onInputFocus = function()
{
self.show();
};
self._onInputClick = function()
{
self.show();
};
self._onInputBlur = function()
{
// IE allows the pika div to gain focus; catch the input field's blur
var pEl = document.activeElement;
do {
if (hasClass(pEl, 'pika-single')) {
return;
}
}
while ((pEl = pEl.parentNode));
if (!self._c) {
self._b = sto(function() {
self.hide();
}, 50);
}
self._c = false;
};
self._onClick = function(e)
{
e = e || window.event;
var target = e.target || e.srcElement,
pEl = target;
if (!target) {
return;
}
if (!hasEventListeners && hasClass(target, 'pika-select')) {
if (!target.onchange) {
target.setAttribute('onchange', 'return;');
addEvent(target, 'change', self._onChange);
}
}
do {
if (hasClass(pEl, 'pika-single') || pEl === opts.trigger) {
return;
}
}
while ((pEl = pEl.parentNode));
if (self._v && target !== opts.trigger && pEl !== opts.trigger) {
self.hide();
}
};
self.el = document.createElement('div');
self.el.className = 'pika-single' + (opts.isRTL ? ' is-rtl' : '');
addEvent(self.el, 'mousedown', self._onMouseDown, true);
addEvent(self.el, 'change', self._onChange);
if (opts.field) {
if (opts.container) {
opts.container.appendChild(self.el);
} else if (opts.bound) {
document.body.appendChild(self.el);
} else {
opts.field.parentNode.insertBefore(self.el, opts.field.nextSibling);
}
addEvent(opts.field, 'change', self._onInputChange);
if (!opts.defaultDate) {
if (hasMoment && opts.field.value) {
opts.defaultDate = moment(opts.field.value, opts.format).toDate();
} else {
opts.defaultDate = new Date(Date.parse(opts.field.value));
}
opts.setDefaultDate = true;
}
}
var defDate = opts.defaultDate;
if (isDate(defDate)) {
if (opts.setDefaultDate) {
self.setDate(defDate, true);
} else {
self.gotoDate(defDate);
}
} else {
self.gotoDate(new Date());
}
if (opts.bound) {
this.hide();
self.el.className += ' is-bound';
addEvent(opts.trigger, 'click', self._onInputClick);
addEvent(opts.trigger, 'focus', self._onInputFocus);
addEvent(opts.trigger, 'blur', self._onInputBlur);
} else {
this.show();
}
};
/**
* public Pikaday API
*/
Pikaday.prototype = {
/**
* configure functionality
*/
config: function(options)
{
if (!this._o) {
this._o = extend({}, defaults, true);
}
var opts = extend(this._o, options, true);
opts.isRTL = !!opts.isRTL;
opts.field = (opts.field && opts.field.nodeName) ? opts.field : null;
opts.bound = !!(opts.bound !== undefined ? opts.field && opts.bound : opts.field);
opts.trigger = (opts.trigger && opts.trigger.nodeName) ? opts.trigger : opts.field;
opts.disableWeekends = !!opts.disableWeekends;
opts.disableDayFn = (typeof opts.disableDayFn === 'function') ? opts.disableDayFn : null;
var nom = parseInt(opts.numberOfMonths, 10) || 1;
opts.numberOfMonths = nom > 4 ? 4 : nom;
if (!isDate(opts.minDate)) {
opts.minDate = false;
}
if (!isDate(opts.maxDate)) {
opts.maxDate = false;
}
if ((opts.minDate && opts.maxDate) && opts.maxDate < opts.minDate) {
opts.maxDate = opts.minDate = false;
}
if (opts.minDate) {
setToStartOfDay(opts.minDate);
opts.minYear = opts.minDate.getFullYear();
opts.minMonth = opts.minDate.getMonth();
}
if (opts.maxDate) {
setToStartOfDay(opts.maxDate);
opts.maxYear = opts.maxDate.getFullYear();
opts.maxMonth = opts.maxDate.getMonth();
}
if (isArray(opts.yearRange)) {
var fallback = new Date().getFullYear() - 10;
opts.yearRange[0] = parseInt(opts.yearRange[0], 10) || fallback;
opts.yearRange[1] = parseInt(opts.yearRange[1], 10) || fallback;
} else {
opts.yearRange = Math.abs(parseInt(opts.yearRange, 10)) || defaults.yearRange;
if (opts.yearRange > 100) {
opts.yearRange = 100;
}
}
return opts;
},
/**
* return a formatted string of the current selection (using Moment.js if available)
*/
toString: function(format)
{
return !isDate(this._d) ? '' : hasMoment ? moment(this._d).format(format || this._o.format) : this._d.toDateString();
},
/**
* return a Moment.js object of the current selection (if available)
*/
getMoment: function()
{
return hasMoment ? moment(this._d) : null;
},
/**
* set the current selection from a Moment.js object (if available)
*/
setMoment: function(date, preventOnSelect)
{
if (hasMoment && moment.isMoment(date)) {
this.setDate(date.toDate(), preventOnSelect);
}
},
/**
* return a Date object of the current selection
*/
getDate: function()
{
return isDate(this._d) ? new Date(this._d.getTime()) : null;
},
/**
* set the current selection
*/
setDate: function(date, preventOnSelect)
{
if (!date) {
this._d = null;
if (this._o.field) {
this._o.field.value = '';
fireEvent(this._o.field, 'change', { firedBy: this });
}
return this.draw();
}
if (typeof date === 'string') {
date = new Date(Date.parse(date));
}
if (!isDate(date)) {
return;
}
var min = this._o.minDate,
max = this._o.maxDate;
if (isDate(min) && date < min) {
date = min;
} else if (isDate(max) && date > max) {
date = max;
}
this._d = new Date(date.getTime());
setToStartOfDay(this._d);
this.gotoDate(this._d);
if (this._o.field) {
this._o.field.value = this.toString();
fireEvent(this._o.field, 'change', { firedBy: this });
}
if (!preventOnSelect && typeof this._o.onSelect === 'function') {
this._o.onSelect.call(this, this.getDate());
}
},
/**
* change view to a specific date
*/
gotoDate: function(date)
{
var newCalendar = true;
if (!isDate(date)) {
return;
}
if (this.calendars) {
var firstVisibleDate = new Date(this.calendars[0].year, this.calendars[0].month, 1),
lastVisibleDate = new Date(this.calendars[this.calendars.length-1].year, this.calendars[this.calendars.length-1].month, 1),
visibleDate = date.getTime();
// get the end of the month
lastVisibleDate.setMonth(lastVisibleDate.getMonth()+1);
lastVisibleDate.setDate(lastVisibleDate.getDate()-1);
newCalendar = (visibleDate < firstVisibleDate.getTime() || lastVisibleDate.getTime() < visibleDate);
}
if (newCalendar) {
this.calendars = [{
month: date.getMonth(),
year: date.getFullYear()
}];
if (this._o.mainCalendar === 'right') {
this.calendars[0].month += 1 - this._o.numberOfMonths;
}
}
this.adjustCalendars();
},
adjustCalendars: function() {
this.calendars[0] = adjustCalendar(this.calendars[0]);
for (var c = 1; c < this._o.numberOfMonths; c++) {
this.calendars[c] = adjustCalendar({
month: this.calendars[0].month + c,
year: this.calendars[0].year
});
}
this.draw();
},
gotoToday: function()
{
this.gotoDate(new Date());
},
/**
* change view to a specific month (zero-index, e.g. 0: January)
*/
gotoMonth: function(month)
{
if (!isNaN(month)) {
this.calendars[0].month = parseInt(month, 10);
this.adjustCalendars();
}
},
nextMonth: function()
{
this.calendars[0].month++;
this.adjustCalendars();
},
prevMonth: function()
{
this.calendars[0].month--;
this.adjustCalendars();
},
/**
* change view to a specific full year (e.g. "2012")
*/
gotoYear: function(year)
{
if (!isNaN(year)) {
this.calendars[0].year = parseInt(year, 10);
this.adjustCalendars();
}
},
/**
* change the minDate
*/
setMinDate: function(value)
{
this._o.minDate = value;
},
/**
* change the maxDate
*/
setMaxDate: function(value)
{
this._o.maxDate = value;
},
/**
* refresh the HTML
*/
draw: function(force)
{
if (!this._v && !force) {
return;
}
var opts = this._o,
minYear = opts.minYear,
maxYear = opts.maxYear,
minMonth = opts.minMonth,
maxMonth = opts.maxMonth,
html = '';
if (this._y <= minYear) {
this._y = minYear;
if (!isNaN(minMonth) && this._m < minMonth) {
this._m = minMonth;
}
}
if (this._y >= maxYear) {
this._y = maxYear;
if (!isNaN(maxMonth) && this._m > maxMonth) {
this._m = maxMonth;
}
}
for (var c = 0; c < opts.numberOfMonths; c++) {
html += '<div class="pika-lendar">' +
renderTitle(this, c, this.calendars[c].year, this.calendars[c].month, this.calendars[0].year) +
this.render(this.calendars[c].year, this.calendars[c].month) +
'</div>';
}
this.el.innerHTML = html;
if (opts.bound) {
if(opts.field.type !== 'hidden') {
sto(function() {
opts.trigger.focus();
}, 1);
}
}
if (typeof this._o.onDraw === 'function') {
var self = this;
sto(function() {
self._o.onDraw.call(self);
}, 0);
}
},
adjustPosition: function()
{
if (this._o.container) return;
var field = this._o.trigger, pEl = field,
width = this.el.offsetWidth, height = this.el.offsetHeight,
viewportWidth = window.innerWidth || document.documentElement.clientWidth,
viewportHeight = window.innerHeight || document.documentElement.clientHeight,
scrollTop = window.pageYOffset || document.body.scrollTop || document.documentElement.scrollTop,
left, top, clientRect;
if (typeof field.getBoundingClientRect === 'function') {
clientRect = field.getBoundingClientRect();
left = clientRect.left + window.pageXOffset;
top = clientRect.bottom + window.pageYOffset;
} else {
left = pEl.offsetLeft;
top = pEl.offsetTop + pEl.offsetHeight;
while((pEl = pEl.offsetParent)) {
left += pEl.offsetLeft;
top += pEl.offsetTop;
}
}
// default position is bottom & left
if ((this._o.reposition && left + width > viewportWidth) ||
(
this._o.position.indexOf('right') > -1 &&
left - width + field.offsetWidth > 0
)
) {
left = left - width + field.offsetWidth;
}
if ((this._o.reposition && top + height > viewportHeight + scrollTop) ||
(
this._o.position.indexOf('top') > -1 &&
top - height - field.offsetHeight > 0
)
) {
top = top - height - field.offsetHeight;
}
this.el.style.cssText = [
'position: absolute',
'left: ' + left + 'px',
'top: ' + top + 'px'
].join(';');
},
/**
* render HTML for a particular month
*/
render: function(year, month)
{
var opts = this._o,
now = new Date(),
days = getDaysInMonth(year, month),
before = new Date(year, month, 1).getDay(),
data = [],
row = [];
setToStartOfDay(now);
if (opts.firstDay > 0) {
before -= opts.firstDay;
if (before < 0) {
before += 7;
}
}
var cells = days + before,
after = cells;
while(after > 7) {
after -= 7;
}
cells += 7 - after;
for (var i = 0, r = 0; i < cells; i++)
{
var day = new Date(year, month, 1 + (i - before)),
isSelected = isDate(this._d) ? compareDates(day, this._d) : false,
isToday = compareDates(day, now),
isEmpty = i < before || i >= (days + before),
isDisabled = (opts.minDate && day < opts.minDate) ||
(opts.maxDate && day > opts.maxDate) ||
(opts.disableWeekends && isWeekend(day)) ||
(opts.disableDayFn && opts.disableDayFn(day));
row.push(renderDay(1 + (i - before), month, year, isSelected, isToday, isDisabled, isEmpty));
if (++r === 7) {
if (opts.showWeekNumber) {
row.unshift(renderWeek(i - before, month, year));
}
data.push(renderRow(row, opts.isRTL));
row = [];
r = 0;
}
}
return renderTable(opts, data);
},
isVisible: function()
{
return this._v;
},
show: function()
{
if (!this._v) {
removeClass(this.el, 'is-hidden');
this._v = true;
this.draw();
if (this._o.bound) {
addEvent(document, 'click', this._onClick);
this.adjustPosition();
}
if (typeof this._o.onOpen === 'function') {
this._o.onOpen.call(this);
}
}
},
hide: function()
{
var v = this._v;
if (v !== false) {
if (this._o.bound) {
removeEvent(document, 'click', this._onClick);
}
this.el.style.cssText = '';
addClass(this.el, 'is-hidden');
this._v = false;
if (v !== undefined && typeof this._o.onClose === 'function') {
this._o.onClose.call(this);
}
}
},
/**
 * tear down the picker: unbind events and remove the element from the DOM
 */
destroy: function()
{
this.hide();
removeEvent(this.el, 'mousedown', this._onMouseDown, true);
removeEvent(this.el, 'change', this._onChange);
if (this._o.field) {
removeEvent(this._o.field, 'change', this._onInputChange);
if (this._o.bound) {
removeEvent(this._o.trigger, 'click', this._onInputClick);
removeEvent(this._o.trigger, 'focus', this._onInputFocus);
removeEvent(this._o.trigger, 'blur', this._onInputBlur);
}
}
if (this.el.parentNode) {
this.el.parentNode.removeChild(this.el);
}
}
};
return Pikaday;
}));
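// A minimal usage sketch (not part of the library; the element id is hypothetical):
// binding Pikaday to an input. `format` is only honoured when Moment.js is present,
// as `toString()` above shows.
//
// var picker = new Pikaday({
//     field: document.getElementById('checkin'),
//     format: 'YYYY-MM-DD',
//     minDate: new Date(2015, 0, 1),
//     onSelect: function () {
//         console.log('picked ' + this.toString());
//     }
// });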
},{"moment":"moment"}],"react-alt-text":[function(require,module,exports){
var React = require('react');
var blacklist = require('blacklist');
var vkey = require('vkey');
var AltText = React.createClass({
displayName: 'AltText',
getDefaultProps: function() {
return {
component: 'span',
modifier: '<alt>',
normal: '',
modified: ''
};
},
getInitialState: function() {
return {
modified: false
};
},
componentDidMount: function() {
document.body.addEventListener('keydown', this.handleKeyDown, false);
document.body.addEventListener('keyup', this.handleKeyUp, false);
},
handleKeyDown: function(e) {
if (vkey[e.keyCode] !== this.props.modifier) return;
this.setState({
modified: true
});
},
handleKeyUp: function(e) {
if (vkey[e.keyCode] !== this.props.modifier) return;
this.setState({
modified: false
});
},
componentWillUnmount: function() {
document.body.removeEventListener('keydown', this.handleKeyDown);
document.body.removeEventListener('keyup', this.handleKeyUp);
},
render: function() {
var props = blacklist(this.props, 'component', 'modifier', 'normal', 'modified');
return React.createElement(this.props.component, props, this.state.modified ? this.props.modified : this.props.normal);
}
});
module.exports = AltText;
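// A usage sketch (assumption, not from this source): AltText swaps its text while
// the `vkey` modifier key (default '<alt>') is held down. The mount node is hypothetical.
//
// React.render(
//     React.createElement(AltText, { normal: 'Delete', modified: 'Delete without confirmation' }),
//     document.getElementById('actions')
// );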
},{"blacklist":1,"react":"react","vkey":179}],"react-select":[function(require,module,exports){
'use strict';
var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
var React = require('react');
var Input = require('react-input-autosize');
var classes = require('classnames');
var Value = require('./Value');
var requestId = 0;
var Select = React.createClass({
displayName: 'Select',
propTypes: {
value: React.PropTypes.any, // initial field value
multi: React.PropTypes.bool, // multi-value input
disabled: React.PropTypes.bool, // whether the Select is disabled or not
options: React.PropTypes.array, // array of options
delimiter: React.PropTypes.string, // delimiter to use to join multiple values
asyncOptions: React.PropTypes.func, // function to call to get options
autoload: React.PropTypes.bool, // whether to auto-load the default async options set
placeholder: React.PropTypes.string, // field placeholder, displayed when there's no value
noResultsText: React.PropTypes.string, // placeholder displayed when there are no matching search results
clearable: React.PropTypes.bool, // should it be possible to reset value
clearValueText: React.PropTypes.string, // title for the "clear" control
clearAllText: React.PropTypes.string, // title for the "clear" control when multi: true
searchable: React.PropTypes.bool, // whether to enable searching feature or not
searchPromptText: React.PropTypes.string, // label to prompt for search input
name: React.PropTypes.string, // field name, for hidden <input /> tag
onChange: React.PropTypes.func, // onChange handler: function(newValue) {}
onFocus: React.PropTypes.func, // onFocus handler: function(event) {}
onBlur: React.PropTypes.func, // onBlur handler: function(event) {}
className: React.PropTypes.string, // className for the outer element
filterOption: React.PropTypes.func, // method to filter a single option: function(option, filterString)
filterOptions: React.PropTypes.func, // method to filter the options array: function([options], filterString, [values])
matchPos: React.PropTypes.string, // (any|start) match the start or entire string when filtering
matchProp: React.PropTypes.string, // (any|label|value) which option property to filter on
inputProps: React.PropTypes.object, // custom attributes for the Input (in the Select-control) e.g: {'data-foo': 'bar'}
/*
* Allow user to make option label clickable. When this handler is defined we should
* wrap label into <a>label</a> tag.
*
* onOptionLabelClick handler: function (value, event) {}
*
*/
onOptionLabelClick: React.PropTypes.func
},
getDefaultProps: function getDefaultProps() {
return {
value: undefined,
options: undefined,
disabled: false,
delimiter: ',',
asyncOptions: undefined,
autoload: true,
placeholder: 'Select...',
noResultsText: 'No results found',
clearable: true,
clearValueText: 'Clear value',
clearAllText: 'Clear all',
searchable: true,
searchPromptText: 'Type to search',
name: undefined,
onChange: undefined,
className: undefined,
matchPos: 'any',
matchProp: 'any',
inputProps: {},
onOptionLabelClick: undefined
};
},
getInitialState: function getInitialState() {
return {
/*
* set by getStateFromValue on componentWillMount:
* - value
* - values
* - filteredOptions
* - inputValue
* - placeholder
* - focusedOption
*/
options: this.props.options,
isFocused: false,
isOpen: false,
isLoading: false
};
},
componentWillMount: function componentWillMount() {
this._optionsCache = {};
this._optionsFilterString = '';
this.setState(this.getStateFromValue(this.props.value));
if (this.props.asyncOptions && this.props.autoload) {
this.autoloadAsyncOptions();
}
var self = this;
this._closeMenuIfClickedOutside = function (event) {
if (!self.state.isOpen) {
return;
}
var menuElem = self.refs.selectMenuContainer.getDOMNode();
var controlElem = self.refs.control.getDOMNode();
var eventOccuredOutsideMenu = self.clickedOutsideElement(menuElem, event);
var eventOccuredOutsideControl = self.clickedOutsideElement(controlElem, event);
// Hide dropdown menu if click occurred outside of menu
if (eventOccuredOutsideMenu && eventOccuredOutsideControl) {
self.setState({
isOpen: false
}, self._unbindCloseMenuIfClickedOutside);
}
};
this._bindCloseMenuIfClickedOutside = function () {
document.addEventListener('click', self._closeMenuIfClickedOutside);
};
this._unbindCloseMenuIfClickedOutside = function () {
document.removeEventListener('click', self._closeMenuIfClickedOutside);
};
},
componentWillUnmount: function componentWillUnmount() {
clearTimeout(this._blurTimeout);
clearTimeout(this._focusTimeout);
if (this.state.isOpen) {
this._unbindCloseMenuIfClickedOutside();
}
},
componentWillReceiveProps: function componentWillReceiveProps(newProps) {
if (JSON.stringify(newProps.options) !== JSON.stringify(this.props.options)) {
this.setState({
options: newProps.options,
filteredOptions: this.filterOptions(newProps.options)
});
}
if (newProps.value !== this.state.value) {
this.setState(this.getStateFromValue(newProps.value, newProps.options));
}
},
componentDidUpdate: function componentDidUpdate() {
var self = this;
if (!this.props.disabled && this._focusAfterUpdate) {
clearTimeout(this._blurTimeout);
this._focusTimeout = setTimeout(function () {
self.getInputNode().focus();
self._focusAfterUpdate = false;
}, 50);
}
if (this._focusedOptionReveal) {
if (this.refs.focused && this.refs.menu) {
var focusedDOM = this.refs.focused.getDOMNode();
var menuDOM = this.refs.menu.getDOMNode();
var focusedRect = focusedDOM.getBoundingClientRect();
var menuRect = menuDOM.getBoundingClientRect();
if (focusedRect.bottom > menuRect.bottom || focusedRect.top < menuRect.top) {
menuDOM.scrollTop = focusedDOM.offsetTop + focusedDOM.clientHeight - menuDOM.offsetHeight;
}
}
this._focusedOptionReveal = false;
}
},
focus: function focus() {
this.getInputNode().focus();
},
clickedOutsideElement: function clickedOutsideElement(element, event) {
var eventTarget = event.target ? event.target : event.srcElement;
while (eventTarget != null) {
if (eventTarget === element) return false;
eventTarget = eventTarget.offsetParent;
}
return true;
},
getStateFromValue: function getStateFromValue(value, options) {
if (!options) {
options = this.state.options;
}
// reset internal filter string
this._optionsFilterString = '';
var values = this.initValuesArray(value, options),
filteredOptions = this.filterOptions(options, values);
return {
value: values.map(function (v) {
return v.value;
}).join(this.props.delimiter),
values: values,
inputValue: '',
filteredOptions: filteredOptions,
placeholder: !this.props.multi && values.length ? values[0].label : this.props.placeholder,
focusedOption: !this.props.multi && values.length ? values[0] : filteredOptions[0]
};
},
initValuesArray: function initValuesArray(values, options) {
if (!Array.isArray(values)) {
if (typeof values === 'string') {
values = values.split(this.props.delimiter);
} else {
values = values ? [values] : [];
}
}
return values.map(function (val) {
if (typeof val === 'string') {
for (var key in options) {
if (options.hasOwnProperty(key) && options[key] && options[key].value === val) {
return options[key];
}
}
return { value: val, label: val };
} else {
return val;
}
});
},
setValue: function setValue(value, focusAfterUpdate) {
if (focusAfterUpdate || focusAfterUpdate === undefined) {
this._focusAfterUpdate = true;
}
var newState = this.getStateFromValue(value);
newState.isOpen = false;
this.fireChangeEvent(newState);
this.setState(newState);
},
selectValue: function selectValue(value) {
if (!this.props.multi) {
this.setValue(value);
} else if (value) {
this.addValue(value);
}
this._unbindCloseMenuIfClickedOutside();
},
addValue: function addValue(value) {
this.setValue(this.state.values.concat(value));
},
popValue: function popValue() {
this.setValue(this.state.values.slice(0, this.state.values.length - 1));
},
removeValue: function removeValue(valueToRemove) {
this.setValue(this.state.values.filter(function (value) {
return value !== valueToRemove;
}));
},
clearValue: function clearValue(event) {
// if the event was triggered by a mousedown with a button other
// than the primary one, ignore it.
if (event && event.type === 'mousedown' && event.button !== 0) {
return;
}
this.setValue(null);
},
resetValue: function resetValue() {
this.setValue(this.state.value);
},
getInputNode: function getInputNode() {
var input = this.refs.input;
return this.props.searchable ? input : input.getDOMNode();
},
fireChangeEvent: function fireChangeEvent(newState) {
if (newState.value !== this.state.value && this.props.onChange) {
this.props.onChange(newState.value, newState.values);
}
},
handleMouseDown: function handleMouseDown(event) {
// if the event was triggered by a mousedown with a button other than
// the primary one, or if the component is disabled, ignore it.
if (this.props.disabled || event.type === 'mousedown' && event.button !== 0) {
return;
}
event.stopPropagation();
event.preventDefault();
if (this.state.isFocused) {
this.setState({
isOpen: true
}, this._bindCloseMenuIfClickedOutside);
} else {
this._openAfterFocus = true;
this.getInputNode().focus();
}
},
handleInputFocus: function handleInputFocus(event) {
var newIsOpen = this.state.isOpen || this._openAfterFocus;
this.setState({
isFocused: true,
isOpen: newIsOpen
}, function () {
if (newIsOpen) {
this._bindCloseMenuIfClickedOutside();
} else {
this._unbindCloseMenuIfClickedOutside();
}
});
this._openAfterFocus = false;
if (this.props.onFocus) {
this.props.onFocus(event);
}
},
handleInputBlur: function handleInputBlur(event) {
var self = this;
this._blurTimeout = setTimeout(function () {
if (self._focusAfterUpdate) return;
self.setState({
isFocused: false
});
}, 50);
if (this.props.onBlur) {
this.props.onBlur(event);
}
},
handleKeyDown: function handleKeyDown(event) {
if (this.state.disabled) return;
switch (event.keyCode) {
case 8:
// backspace
if (!this.state.inputValue) {
this.popValue();
}
return;
case 9:
// tab
if (event.shiftKey || !this.state.isOpen || !this.state.focusedOption) {
return;
}
this.selectFocusedOption();
break;
case 13:
// enter
this.selectFocusedOption();
break;
case 27:
// escape
if (this.state.isOpen) {
this.resetValue();
} else {
this.clearValue();
}
break;
case 38:
// up
this.focusPreviousOption();
break;
case 40:
// down
this.focusNextOption();
break;
default:
return;
}
event.preventDefault();
},
// Returns the currently focused option if it is still present in
// filteredOptions; otherwise returns the first available option.
_getNewFocusedOption: function _getNewFocusedOption(filteredOptions) {
for (var key in filteredOptions) {
if (filteredOptions.hasOwnProperty(key) && filteredOptions[key] === this.state.focusedOption) {
return filteredOptions[key];
}
}
return filteredOptions[0];
},
handleInputChange: function handleInputChange(event) {
// assign an internal variable because we need to use
// the latest value before setState() has completed.
this._optionsFilterString = event.target.value;
if (this.props.asyncOptions) {
this.setState({
isLoading: true,
inputValue: event.target.value
});
this.loadAsyncOptions(event.target.value, {
isLoading: false,
isOpen: true
}, this._bindCloseMenuIfClickedOutside);
} else {
var filteredOptions = this.filterOptions(this.state.options);
this.setState({
isOpen: true,
inputValue: event.target.value,
filteredOptions: filteredOptions,
focusedOption: this._getNewFocusedOption(filteredOptions)
}, this._bindCloseMenuIfClickedOutside);
}
},
autoloadAsyncOptions: function autoloadAsyncOptions() {
var self = this;
this.loadAsyncOptions('', {}, function () {
// update with the fetched options, but don't focus the input
self.setValue(self.props.value, false);
});
},
loadAsyncOptions: function loadAsyncOptions(input, state, callback) {
var thisRequestId = this._currentRequestId = requestId++;
for (var i = 0; i <= input.length; i++) {
var cacheKey = input.slice(0, i);
if (this._optionsCache[cacheKey] && (input === cacheKey || this._optionsCache[cacheKey].complete)) {
var options = this._optionsCache[cacheKey].options;
var filteredOptions = this.filterOptions(options);
var newState = {
options: options,
filteredOptions: filteredOptions,
focusedOption: this._getNewFocusedOption(filteredOptions)
};
for (var key in state) {
if (state.hasOwnProperty(key)) {
newState[key] = state[key];
}
}
this.setState(newState);
if (callback) callback({});
return;
}
}
var self = this;
this.props.asyncOptions(input, function (err, data) {
if (err) throw err;
self._optionsCache[input] = data;
if (thisRequestId !== self._currentRequestId) {
return;
}
var filteredOptions = self.filterOptions(data.options);
var newState = {
options: data.options,
filteredOptions: filteredOptions,
focusedOption: self._getNewFocusedOption(filteredOptions)
};
for (var key in state) {
if (state.hasOwnProperty(key)) {
newState[key] = state[key];
}
}
self.setState(newState);
if (callback) callback({});
});
},
filterOptions: function filterOptions(options, values) {
if (!this.props.searchable) {
return options;
}
var filterValue = this._optionsFilterString;
var exclude = (values || this.state.values).map(function (i) {
return i.value;
});
if (this.props.filterOptions) {
return this.props.filterOptions.call(this, options, filterValue, exclude);
} else {
var filterOption = function filterOption(op) {
if (this.props.multi && exclude.indexOf(op.value) > -1) return false;
if (this.props.filterOption) return this.props.filterOption.call(this, op, filterValue);
var valueTest = String(op.value),
labelTest = String(op.label);
return (!filterValue || this.props.matchPos === 'start')
? (this.props.matchProp !== 'label' && valueTest.toLowerCase().substr(0, filterValue.length) === filterValue)
|| (this.props.matchProp !== 'value' && labelTest.toLowerCase().substr(0, filterValue.length) === filterValue)
: (this.props.matchProp !== 'label' && valueTest.toLowerCase().indexOf(filterValue.toLowerCase()) >= 0)
|| (this.props.matchProp !== 'value' && labelTest.toLowerCase().indexOf(filterValue.toLowerCase()) >= 0);
};
return (options || []).filter(filterOption, this);
}
},
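// A sketch of the `filterOption` escape hatch documented in the propTypes above
// (options are hypothetical): match on the label prefix only, ignoring matchPos/matchProp.
//
// React.createElement(Select, {
//     options: [{ value: 'one', label: 'One' }, { value: 'two', label: 'Two' }],
//     filterOption: function (op, filterString) {
//         return op.label.toLowerCase().indexOf(filterString.toLowerCase()) === 0;
//     }
// });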
selectFocusedOption: function selectFocusedOption() {
return this.selectValue(this.state.focusedOption);
},
focusOption: function focusOption(op) {
this.setState({
focusedOption: op
});
},
focusNextOption: function focusNextOption() {
this.focusAdjacentOption('next');
},
focusPreviousOption: function focusPreviousOption() {
this.focusAdjacentOption('previous');
},
focusAdjacentOption: function focusAdjacentOption(dir) {
this._focusedOptionReveal = true;
var ops = this.state.filteredOptions;
if (!this.state.isOpen) {
this.setState({
isOpen: true,
inputValue: '',
focusedOption: this.state.focusedOption || ops[dir === 'next' ? 0 : ops.length - 1]
}, this._bindCloseMenuIfClickedOutside);
return;
}
if (!ops.length) {
return;
}
var focusedIndex = -1;
for (var i = 0; i < ops.length; i++) {
if (this.state.focusedOption === ops[i]) {
focusedIndex = i;
break;
}
}
var focusedOption = ops[0];
if (dir === 'next' && focusedIndex > -1 && focusedIndex < ops.length - 1) {
focusedOption = ops[focusedIndex + 1];
} else if (dir === 'previous') {
if (focusedIndex > 0) {
focusedOption = ops[focusedIndex - 1];
} else {
focusedOption = ops[ops.length - 1];
}
}
this.setState({
focusedOption: focusedOption
});
},
unfocusOption: function unfocusOption(op) {
if (this.state.focusedOption === op) {
this.setState({
focusedOption: null
});
}
},
buildMenu: function buildMenu() {
var focusedValue = this.state.focusedOption ? this.state.focusedOption.value : null;
if (this.state.filteredOptions.length > 0) {
focusedValue = focusedValue == null ? this.state.filteredOptions[0].value : focusedValue; // default to the first option's value so the comparison below can match
}
var ops = Object.keys(this.state.filteredOptions).map(function (key) {
var op = this.state.filteredOptions[key];
var isFocused = focusedValue === op.value;
var optionClass = classes({
'Select-option': true,
'is-focused': isFocused,
'is-disabled': op.disabled
});
var ref = isFocused ? 'focused' : null;
var mouseEnter = this.focusOption.bind(this, op);
var mouseLeave = this.unfocusOption.bind(this, op);
var mouseDown = this.selectValue.bind(this, op);
if (op.disabled) {
return React.createElement(
'div',
{ ref: ref, key: 'option-' + op.value, className: optionClass },
op.label
);
} else {
return React.createElement(
'div',
{ ref: ref, key: 'option-' + op.value, className: optionClass, onMouseEnter: mouseEnter, onMouseLeave: mouseLeave, onMouseDown: mouseDown, onClick: mouseDown },
op.label
);
}
}, this);
return ops.length ? ops : React.createElement(
'div',
{ className: 'Select-noresults' },
this.props.asyncOptions && !this.state.inputValue ? this.props.searchPromptText : this.props.noResultsText
);
},
handleOptionLabelClick: function handleOptionLabelClick(value, event) {
var handler = this.props.onOptionLabelClick;
if (handler) {
handler(value, event);
}
},
render: function render() {
var selectClass = classes('Select', this.props.className, {
'is-multi': this.props.multi,
'is-searchable': this.props.searchable,
'is-open': this.state.isOpen,
'is-focused': this.state.isFocused,
'is-loading': this.state.isLoading,
'is-disabled': this.props.disabled,
'has-value': this.state.value
});
var value = [];
if (this.props.multi) {
this.state.values.forEach(function (val) {
var props = {
key: val.value,
optionLabelClick: !!this.props.onOptionLabelClick,
onOptionLabelClick: this.handleOptionLabelClick.bind(this, val),
onRemove: this.removeValue.bind(this, val)
};
for (var key in val) {
if (val.hasOwnProperty(key)) {
props[key] = val[key];
}
}
value.push(React.createElement(Value, props));
}, this);
}
if (this.props.disabled || !this.state.inputValue && (!this.props.multi || !value.length)) {
value.push(React.createElement(
'div',
{ className: 'Select-placeholder', key: 'placeholder' },
this.state.placeholder
));
}
var loading = this.state.isLoading ? React.createElement('span', { className: 'Select-loading', 'aria-hidden': 'true' }) : null;
var clear = this.props.clearable && this.state.value && !this.props.disabled ? React.createElement('span', { className: 'Select-clear', title: this.props.multi ? this.props.clearAllText : this.props.clearValueText, 'aria-label': this.props.multi ? this.props.clearAllText : this.props.clearValueText, onMouseDown: this.clearValue, onClick: this.clearValue, dangerouslySetInnerHTML: { __html: '×' } }) : null;
var menu;
var menuProps;
if (this.state.isOpen) {
menuProps = {
ref: 'menu',
className: 'Select-menu'
};
if (this.props.multi) {
menuProps.onMouseDown = this.handleMouseDown;
}
menu = React.createElement(
'div',
{ ref: 'selectMenuContainer', className: 'Select-menu-outer' },
React.createElement(
'div',
menuProps,
this.buildMenu()
)
);
}
var input;
var inputProps = {
ref: 'input',
className: 'Select-input',
tabIndex: this.props.tabIndex || 0,
onFocus: this.handleInputFocus,
onBlur: this.handleInputBlur
};
for (var key in this.props.inputProps) {
if (this.props.inputProps.hasOwnProperty(key)) {
inputProps[key] = this.props.inputProps[key];
}
}
if (this.props.searchable && !this.props.disabled) {
input = React.createElement(Input, _extends({ value: this.state.inputValue, onChange: this.handleInputChange, minWidth: '5' }, inputProps));
} else {
input = React.createElement(
'div',
inputProps,
' '
);
}
return React.createElement(
'div',
{ ref: 'wrapper', className: selectClass },
React.createElement('input', { type: 'hidden', ref: 'value', name: this.props.name, value: this.state.value, disabled: this.props.disabled }),
React.createElement(
'div',
{ className: 'Select-control', ref: 'control', onKeyDown: this.handleKeyDown, onMouseDown: this.handleMouseDown, onTouchEnd: this.handleMouseDown },
value,
input,
React.createElement('span', { className: 'Select-arrow' }),
loading,
clear
),
menu
);
}
});
module.exports = Select;
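// A minimal usage sketch (names and mount point are hypothetical): a searchable
// single-value select wired to the props documented above.
//
// React.render(
//     React.createElement(Select, {
//         name: 'color',
//         value: 'blue',
//         options: [{ value: 'blue', label: 'Blue' }, { value: 'red', label: 'Red' }],
//         onChange: function (newValue, values) { console.log('selected', newValue); }
//     }),
//     document.getElementById('color-picker')
// );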
},{"./Value":4,"classnames":"classnames","react":"react","react-input-autosize":5}],"react/addons":[function(require,module,exports){
module.exports = require('./lib/ReactWithAddons');
},{"./lib/ReactWithAddons":105}],"react":[function(require,module,exports){
module.exports = require('./lib/React');
},{"./lib/React":35}],"superagent":[function(require,module,exports){
/**
* Module dependencies.
*/
var Emitter = require('emitter');
var reduce = require('reduce');
/**
* Root reference for iframes.
*/
var root = 'undefined' == typeof window
? (this || self)
: window;
/**
* Noop.
*/
function noop(){};
/**
* Check if `obj` is a host object,
* we don't want to serialize these :)
*
* TODO: future proof, move to component land
*
* @param {Object} obj
* @return {Boolean}
* @api private
*/
function isHost(obj) {
var str = {}.toString.call(obj);
switch (str) {
case '[object File]':
case '[object Blob]':
case '[object FormData]':
return true;
default:
return false;
}
}
/**
* Determine XHR.
*/
request.getXHR = function () {
if (root.XMLHttpRequest
&& (!root.location || 'file:' != root.location.protocol
|| !root.ActiveXObject)) {
return new XMLHttpRequest;
} else {
try { return new ActiveXObject('Microsoft.XMLHTTP'); } catch(e) {}
try { return new ActiveXObject('Msxml2.XMLHTTP.6.0'); } catch(e) {}
try { return new ActiveXObject('Msxml2.XMLHTTP.3.0'); } catch(e) {}
try { return new ActiveXObject('Msxml2.XMLHTTP'); } catch(e) {}
}
return false;
};
/**
* Removes leading and trailing whitespace, added to support IE.
*
* @param {String} s
* @return {String}
* @api private
*/
var trim = ''.trim
? function(s) { return s.trim(); }
: function(s) { return s.replace(/(^\s*|\s*$)/g, ''); };
/**
* Check if `obj` is an object.
*
* @param {Object} obj
* @return {Boolean}
* @api private
*/
function isObject(obj) {
return obj === Object(obj);
}
/**
* Serialize the given `obj`.
*
* @param {Object} obj
* @return {String}
* @api private
*/
function serialize(obj) {
if (!isObject(obj)) return obj;
var pairs = [];
for (var key in obj) {
if (null != obj[key]) {
pairs.push(encodeURIComponent(key)
+ '=' + encodeURIComponent(obj[key]));
}
}
return pairs.join('&');
}
/**
* Expose serialization method.
*/
request.serializeObject = serialize;
/**
* Parse the given x-www-form-urlencoded `str`.
*
* @param {String} str
* @return {Object}
* @api private
*/
function parseString(str) {
var obj = {};
var pairs = str.split('&');
var parts;
var pair;
for (var i = 0, len = pairs.length; i < len; ++i) {
pair = pairs[i];
parts = pair.split('=');
obj[decodeURIComponent(parts[0])] = decodeURIComponent(parts[1]);
}
return obj;
}
/**
* Expose parser.
*/
request.parseString = parseString;
/**
* Default MIME type map.
*
* superagent.types.xml = 'application/xml';
*
*/
request.types = {
html: 'text/html',
json: 'application/json',
xml: 'application/xml',
urlencoded: 'application/x-www-form-urlencoded',
'form': 'application/x-www-form-urlencoded',
'form-data': 'application/x-www-form-urlencoded'
};
/**
* Default serialization map.
*
* superagent.serialize['application/xml'] = function(obj){
* return 'generated xml here';
* };
*
*/
request.serialize = {
'application/x-www-form-urlencoded': serialize,
'application/json': JSON.stringify
};
/**
* Default parsers.
*
* superagent.parse['application/xml'] = function(str){
* return { object parsed from str };
* };
*
*/
request.parse = {
'application/x-www-form-urlencoded': parseString,
'application/json': JSON.parse
};
/**
* Parse the given header `str` into
* an object containing the mapped fields.
*
* @param {String} str
* @return {Object}
* @api private
*/
function parseHeader(str) {
var lines = str.split(/\r?\n/);
var fields = {};
var index;
var line;
var field;
var val;
lines.pop(); // trailing CRLF
for (var i = 0, len = lines.length; i < len; ++i) {
line = lines[i];
index = line.indexOf(':');
field = line.slice(0, index).toLowerCase();
val = trim(line.slice(index + 1));
fields[field] = val;
}
return fields;
}
/**
* Return the mime type for the given `str`.
*
* @param {String} str
* @return {String}
* @api private
*/
function type(str){
return str.split(/ *; */).shift();
};
/**
* Return header field parameters.
*
* @param {String} str
* @return {Object}
* @api private
*/
function params(str){
return reduce(str.split(/ *; */), function(obj, str){
var parts = str.split(/ *= */)
, key = parts.shift()
, val = parts.shift();
if (key && val) obj[key] = val;
return obj;
}, {});
};
/**
* Initialize a new `Response` for the given `req` (reading its `xhr`).
*
* - set flags (.ok, .error, etc)
* - parse header
*
* Examples:
*
* Aliasing `superagent` as `request` is nice:
*
* request = superagent;
*
* We can use the promise-like API, or pass callbacks:
*
* request.get('/').end(function(res){});
* request.get('/', function(res){});
*
* Sending data can be chained:
*
* request
* .post('/user')
* .send({ name: 'tj' })
* .end(function(res){});
*
* Or passed to `.send()`:
*
* request
* .post('/user')
* .send({ name: 'tj' }, function(res){});
*
* Or passed to `.post()`:
*
* request
* .post('/user', { name: 'tj' })
* .end(function(res){});
*
* Or further reduced to a single call for simple cases:
*
* request
* .post('/user', { name: 'tj' }, function(res){});
*
* @param {Request} req
* @param {Object} options
* @api private
*/
function Response(req, options) {
options = options || {};
this.req = req;
this.xhr = this.req.xhr;
// responseText is accessible only if responseType is '' or 'text' and on older browsers
this.text = ((this.req.method !='HEAD' && (this.xhr.responseType === '' || this.xhr.responseType === 'text')) || typeof this.xhr.responseType === 'undefined')
? this.xhr.responseText
: null;
this.statusText = this.req.xhr.statusText;
this.setStatusProperties(this.xhr.status);
this.header = this.headers = parseHeader(this.xhr.getAllResponseHeaders());
// getAllResponseHeaders sometimes falsely returns "" for CORS requests, but
// getResponseHeader still works. so we get content-type even if getting
// other headers fails.
this.header['content-type'] = this.xhr.getResponseHeader('content-type');
this.setHeaderProperties(this.header);
this.body = this.req.method != 'HEAD'
? this.parseBody(this.text ? this.text : this.xhr.response)
: null;
}
/**
* Get case-insensitive `field` value.
*
* @param {String} field
* @return {String}
* @api public
*/
Response.prototype.get = function(field){
return this.header[field.toLowerCase()];
};
/**
* Set header related properties:
*
* - `.type` the content type without params
*
* A response of "Content-Type: text/plain; charset=utf-8"
* will provide you with a `.type` of "text/plain".
*
* @param {Object} header
* @api private
*/
Response.prototype.setHeaderProperties = function(header){
// content-type
var ct = this.header['content-type'] || '';
this.type = type(ct);
// params
var obj = params(ct);
for (var key in obj) this[key] = obj[key];
};
/**
* Parse the given body `str`.
*
* Used for auto-parsing of bodies. Parsers
* are defined on the `superagent.parse` object.
*
* @param {String} str
* @return {Mixed}
* @api private
*/
Response.prototype.parseBody = function(str){
var parse = request.parse[this.type];
return parse && str && (str.length || str instanceof Object)
? parse(str)
: null;
};
/**
* Set flags such as `.ok` based on `status`.
*
* For example a 2xx response will give you a `.ok` of __true__
* whereas 5xx will be __false__ and `.error` will be __true__. The
* `.clientError` and `.serverError` are also available to be more
* specific, and `.statusType` is the class of error ranging from 1..5
* sometimes useful for mapping respond colors etc.
*
* "sugar" properties are also defined for common cases. Currently providing:
*
* - .noContent
* - .badRequest
* - .unauthorized
* - .notAcceptable
* - .notFound
*
* @param {Number} status
* @api private
*/
Response.prototype.setStatusProperties = function(status){
// handle IE9 bug: http://stackoverflow.com/questions/10046972/msie-returns-status-code-of-1223-for-ajax-request
if (status === 1223) {
status = 204;
}
var type = status / 100 | 0;
// status / class
this.status = status;
this.statusType = type;
// basics
this.info = 1 == type;
this.ok = 2 == type;
this.clientError = 4 == type;
this.serverError = 5 == type;
this.error = (4 == type || 5 == type)
? this.toError()
: false;
// sugar
this.accepted = 202 == status;
this.noContent = 204 == status;
this.badRequest = 400 == status;
this.unauthorized = 401 == status;
this.notAcceptable = 406 == status;
this.notFound = 404 == status;
this.forbidden = 403 == status;
};
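// A sketch of how these flags read at the call site (the endpoint is hypothetical);
// note the callback receives (err, res), as `Request.prototype.callback` below shows:
//
// request.get('/users').end(function(err, res){
//     if (err) return console.error(err);
//     if (res.ok) console.log('got', res.body);
//     else if (res.unauthorized) console.log('please log in');
//     else if (res.serverError) console.log('try again later');
// });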
/**
* Return an `Error` representative of this response.
*
* @return {Error}
* @api public
*/
Response.prototype.toError = function(){
var req = this.req;
var method = req.method;
var url = req.url;
var msg = 'cannot ' + method + ' ' + url + ' (' + this.status + ')';
var err = new Error(msg);
err.status = this.status;
err.method = method;
err.url = url;
return err;
};
/**
* Expose `Response`.
*/
request.Response = Response;
/**
* Initialize a new `Request` with the given `method` and `url`.
*
* @param {String} method
* @param {String} url
* @api public
*/
function Request(method, url) {
var self = this;
Emitter.call(this);
this._query = this._query || [];
this.method = method;
this.url = url;
this.header = {};
this._header = {};
this.on('end', function(){
var err = null;
var res = null;
try {
res = new Response(self);
} catch(e) {
err = new Error('Parser is unable to parse the response');
err.parse = true;
err.original = e;
return self.callback(err);
}
self.emit('response', res);
if (err) {
return self.callback(err, res);
}
if (res.status >= 200 && res.status < 300) {
return self.callback(err, res);
}
var new_err = new Error(res.statusText || 'Unsuccessful HTTP response');
new_err.original = err;
new_err.response = res;
new_err.status = res.status;
self.callback(err || new_err, res);
});
}
/**
* Mixin `Emitter`.
*/
Emitter(Request.prototype);
/**
* Allow for extension
*/
Request.prototype.use = function(fn) {
fn(this);
return this;
}
/**
* Set timeout to `ms`.
*
* @param {Number} ms
* @return {Request} for chaining
* @api public
*/
Request.prototype.timeout = function(ms){
this._timeout = ms;
return this;
};
/**
* Clear previous timeout.
*
* @return {Request} for chaining
* @api public
*/
Request.prototype.clearTimeout = function(){
this._timeout = 0;
clearTimeout(this._timer);
return this;
};
/**
* Abort the request, and clear potential timeout.
*
* @return {Request}
* @api public
*/
Request.prototype.abort = function(){
if (this.aborted) return;
this.aborted = true;
this.xhr.abort();
this.clearTimeout();
this.emit('abort');
return this;
};
/**
* Set header `field` to `val`, or multiple fields with one object.
*
* Examples:
*
* req.get('/')
* .set('Accept', 'application/json')
* .set('X-API-Key', 'foobar')
* .end(callback);
*
* req.get('/')
* .set({ Accept: 'application/json', 'X-API-Key': 'foobar' })
* .end(callback);
*
* @param {String|Object} field
* @param {String} val
* @return {Request} for chaining
* @api public
*/
Request.prototype.set = function(field, val){
if (isObject(field)) {
for (var key in field) {
this.set(key, field[key]);
}
return this;
}
this._header[field.toLowerCase()] = val;
this.header[field] = val;
return this;
};
/**
* Remove header `field`.
*
* Example:
*
* req.get('/')
* .unset('User-Agent')
* .end(callback);
*
* @param {String} field
* @return {Request} for chaining
* @api public
*/
Request.prototype.unset = function(field){
delete this._header[field.toLowerCase()];
delete this.header[field];
return this;
};
/**
* Get case-insensitive header `field` value.
*
* @param {String} field
* @return {String}
* @api private
*/
Request.prototype.getHeader = function(field){
return this._header[field.toLowerCase()];
};
/**
* Set Content-Type to `type`, mapping values from `request.types`.
*
* Examples:
*
* superagent.types.xml = 'application/xml';
*
* request.post('/')
* .type('xml')
* .send(xmlstring)
* .end(callback);
*
* request.post('/')
* .type('application/xml')
* .send(xmlstring)
* .end(callback);
*
* @param {String} type
* @return {Request} for chaining
* @api public
*/
Request.prototype.type = function(type){
this.set('Content-Type', request.types[type] || type);
return this;
};
/**
* Set Accept to `type`, mapping values from `request.types`.
*
* Examples:
*
* superagent.types.json = 'application/json';
*
* request.get('/agent')
* .accept('json')
* .end(callback);
*
* request.get('/agent')
* .accept('application/json')
* .end(callback);
*
* @param {String} accept
* @return {Request} for chaining
* @api public
*/
Request.prototype.accept = function(type){
this.set('Accept', request.types[type] || type);
return this;
};
/**
* Set Authorization field value with `user` and `pass`.
*
* @param {String} user
* @param {String} pass
* @return {Request} for chaining
* @api public
*/
Request.prototype.auth = function(user, pass){
var str = btoa(user + ':' + pass);
this.set('Authorization', 'Basic ' + str);
return this;
};
/**
* Add query-string `val`.
*
* Examples:
*
* request.get('/shoes')
* .query('size=10')
* .query({ color: 'blue' })
*
* @param {Object|String} val
* @return {Request} for chaining
* @api public
*/
Request.prototype.query = function(val){
if ('string' != typeof val) val = serialize(val);
if (val) this._query.push(val);
return this;
};
/**
* Write the field `name` and `val` for "multipart/form-data"
* request bodies.
*
* ``` js
* request.post('/upload')
* .field('foo', 'bar')
* .end(callback);
* ```
*
* @param {String} name
* @param {String|Blob|File} val
* @return {Request} for chaining
* @api public
*/
Request.prototype.field = function(name, val){
if (!this._formData) this._formData = new root.FormData();
this._formData.append(name, val);
return this;
};
/**
* Queue the given `file` as an attachment to the specified `field`,
* with optional `filename`.
*
* ``` js
* request.post('/upload')
* .attach(new Blob(['<a id="a"><b id="b">hey!</b></a>'], { type: "text/html"}))
* .end(callback);
* ```
*
* @param {String} field
* @param {Blob|File} file
* @param {String} filename
* @return {Request} for chaining
* @api public
*/
Request.prototype.attach = function(field, file, filename){
if (!this._formData) this._formData = new root.FormData();
this._formData.append(field, file, filename);
return this;
};
/**
* Send `data`, defaulting the `.type()` to "json" when
* an object is given.
*
* Examples:
*
* // querystring
* request.get('/search')
* .end(callback)
*
* // multiple data "writes"
* request.get('/search')
* .send({ search: 'query' })
* .send({ range: '1..5' })
* .send({ order: 'desc' })
* .end(callback)
*
* // manual json
* request.post('/user')
* .type('json')
* .send('{"name":"tj"})
* .end(callback)
*
* // auto json
* request.post('/user')
* .send({ name: 'tj' })
* .end(callback)
*
* // manual x-www-form-urlencoded
* request.post('/user')
* .type('form')
* .send('name=tj')
* .end(callback)
*
* // auto x-www-form-urlencoded
* request.post('/user')
* .type('form')
* .send({ name: 'tj' })
* .end(callback)
*
* // defaults to x-www-form-urlencoded
* request.post('/user')
* .send('name=tobi')
* .send('species=ferret')
* .end(callback)
*
* @param {String|Object} data
* @return {Request} for chaining
* @api public
*/
Request.prototype.send = function(data){
var obj = isObject(data);
var type = this.getHeader('Content-Type');
// merge
if (obj && isObject(this._data)) {
for (var key in data) {
this._data[key] = data[key];
}
} else if ('string' == typeof data) {
if (!type) this.type('form');
type = this.getHeader('Content-Type');
if ('application/x-www-form-urlencoded' == type) {
this._data = this._data
? this._data + '&' + data
: data;
} else {
this._data = (this._data || '') + data;
}
} else {
this._data = data;
}
if (!obj || isHost(data)) return this;
if (!type) this.type('json');
return this;
};
/**
* Invoke the callback with `err` and `res`.
*
* @param {Error} err
* @param {Response} res
* @api private
*/
Request.prototype.callback = function(err, res){
var fn = this._callback;
this.clearTimeout();
fn(err, res);
};
/**
* Invoke callback with x-domain error.
*
* @api private
*/
Request.prototype.crossDomainError = function(){
var err = new Error('Origin is not allowed by Access-Control-Allow-Origin');
err.crossDomain = true;
this.callback(err);
};
/**
* Invoke callback with timeout error.
*
* @api private
*/
Request.prototype.timeoutError = function(){
var timeout = this._timeout;
var err = new Error('timeout of ' + timeout + 'ms exceeded');
err.timeout = timeout;
this.callback(err);
};
/**
* Enable transmission of cookies with x-domain requests.
*
* Note that for this to work the origin must not be
* using "Access-Control-Allow-Origin" with a wildcard,
* and also must set "Access-Control-Allow-Credentials"
* to "true".
*
* @api public
*/
Request.prototype.withCredentials = function(){
this._withCredentials = true;
return this;
};
/**
* Initiate request, invoking callback `fn(res)`
* with an instanceof `Response`.
*
* @param {Function} fn
* @return {Request} for chaining
* @api public
*/
Request.prototype.end = function(fn){
var self = this;
var xhr = this.xhr = request.getXHR();
var query = this._query.join('&');
var timeout = this._timeout;
var data = this._formData || this._data;
// store callback
this._callback = fn || noop;
// state change
xhr.onreadystatechange = function(){
if (4 != xhr.readyState) return;
// In IE9, reads to any property (e.g. status) off of an aborted XHR will
// result in the error "Could not complete the operation due to error c00c023f"
var status;
try { status = xhr.status } catch(e) { status = 0; }
if (0 == status) {
if (self.timedout) return self.timeoutError();
if (self.aborted) return;
return self.crossDomainError();
}
self.emit('end');
};
// progress
var handleProgress = function(e){
if (e.total > 0) {
e.percent = e.loaded / e.total * 100;
}
self.emit('progress', e);
};
if (this.hasListeners('progress')) {
xhr.onprogress = handleProgress;
}
try {
if (xhr.upload && this.hasListeners('progress')) {
xhr.upload.onprogress = handleProgress;
}
} catch(e) {
// Accessing xhr.upload fails in IE from a web worker, so just pretend it doesn't exist.
// Reported here:
// https://connect.microsoft.com/IE/feedback/details/837245/xmlhttprequest-upload-throws-invalid-argument-when-used-from-web-worker-context
}
// timeout
if (timeout && !this._timer) {
this._timer = setTimeout(function(){
self.timedout = true;
self.abort();
}, timeout);
}
// querystring
if (query) {
query = request.serializeObject(query);
this.url += ~this.url.indexOf('?')
? '&' + query
: '?' + query;
}
// initiate request
xhr.open(this.method, this.url, true);
// CORS
if (this._withCredentials) xhr.withCredentials = true;
// body
if ('GET' != this.method && 'HEAD' != this.method && 'string' != typeof data && !isHost(data)) {
// serialize stuff
var serialize = request.serialize[this.getHeader('Content-Type')];
if (serialize) data = serialize(data);
}
// set header fields
for (var field in this.header) {
if (null == this.header[field]) continue;
xhr.setRequestHeader(field, this.header[field]);
}
// send stuff
this.emit('request', this);
xhr.send(data);
return this;
};
/**
* Expose `Request`.
*/
request.Request = Request;
/**
* Issue a request:
*
* Examples:
*
* request('GET', '/users').end(callback)
* request('/users').end(callback)
* request('/users', callback)
*
* @param {String} method
* @param {String|Function} url or callback
* @return {Request}
* @api public
*/
function request(method, url) {
// callback
if ('function' == typeof url) {
return new Request('GET', method).end(url);
}
// url first
if (1 == arguments.length) {
return new Request('GET', method);
}
return new Request(method, url);
}
/**
* GET `url` with optional callback `fn(res)`.
*
* @param {String} url
* @param {Mixed|Function} data or fn
* @param {Function} fn
* @return {Request}
* @api public
*/
request.get = function(url, data, fn){
var req = request('GET', url);
if ('function' == typeof data) fn = data, data = null;
if (data) req.query(data);
if (fn) req.end(fn);
return req;
};
/**
* HEAD `url` with optional callback `fn(res)`.
*
* @param {String} url
* @param {Mixed|Function} data or fn
* @param {Function} fn
* @return {Request}
* @api public
*/
request.head = function(url, data, fn){
var req = request('HEAD', url);
if ('function' == typeof data) fn = data, data = null;
if (data) req.send(data);
if (fn) req.end(fn);
return req;
};
/**
* DELETE `url` with optional callback `fn(res)`.
*
* @param {String} url
* @param {Function} fn
* @return {Request}
* @api public
*/
request.del = function(url, fn){
var req = request('DELETE', url);
if (fn) req.end(fn);
return req;
};
/**
* PATCH `url` with optional `data` and callback `fn(res)`.
*
* @param {String} url
* @param {Mixed} data
* @param {Function} fn
* @return {Request}
* @api public
*/
request.patch = function(url, data, fn){
var req = request('PATCH', url);
if ('function' == typeof data) fn = data, data = null;
if (data) req.send(data);
if (fn) req.end(fn);
return req;
};
/**
* POST `url` with optional `data` and callback `fn(res)`.
*
* @param {String} url
* @param {Mixed} data
* @param {Function} fn
* @return {Request}
* @api public
*/
request.post = function(url, data, fn){
var req = request('POST', url);
if ('function' == typeof data) fn = data, data = null;
if (data) req.send(data);
if (fn) req.end(fn);
return req;
};
/**
* PUT `url` with optional `data` and callback `fn(res)`.
*
* @param {String} url
* @param {Mixed|Function} data or fn
* @param {Function} fn
* @return {Request}
* @api public
*/
request.put = function(url, data, fn){
var req = request('PUT', url);
if ('function' == typeof data) fn = data, data = null;
if (data) req.send(data);
if (fn) req.end(fn);
return req;
};
/**
* Expose `request`.
*/
module.exports = request;
},{"emitter":177,"reduce":178}]},{},[]); | * LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory. |
Regularization.py | def regularization(InputImage, StructuringElementRadius=3):
"""
Compute the 3D scalar field that will be used to regularize the propagation of the seeds | Outputs:
* R: The 3D numpy array having the same size as InputImage, used for the regularization
"""
from scipy import ndimage
MSE = StructuringElementRadius
return ndimage.morphological_gradient(InputImage, size=(MSE, MSE, MSE)) | Inputs:
* InputImage: the 3D image that will be segmented. Must be a 3D numpy array.
* StructuringElementRadius: A structuring element of size (1+2*StructuringElementRadius) x (1+2*StructuringElementRadius) x (1+2*StructuringElementRadius) will be used |
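# A usage sketch (the volume below is synthetic): the regularization field is the
# morphological gradient of the input over a size=(MSE, MSE, MSE) window. Note the
# code passes MSE voxels per axis, not the (1+2*MSE) the docstring suggests.
# import numpy as np
# volume = np.random.rand(64, 64, 64)
# R = regularization(volume, StructuringElementRadius=3)
# assert R.shape == volume.shape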
_private_endpoint_connections_operations_async.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class PrivateEndpointConnectionsOperations:
"""PrivateEndpointConnectionsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~synapse_management_client.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def get(
self,
resource_group_name: str,
workspace_name: str,
private_endpoint_connection_name: str,
**kwargs
) -> "models.PrivateEndpointConnection":
"""Gets a private endpoint connection.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param private_endpoint_connection_name: The name of the private endpoint connection.
:type private_endpoint_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PrivateEndpointConnection, or the result of cls(response)
:rtype: ~synapse_management_client.models.PrivateEndpointConnection
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnection"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01-preview"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorContract, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
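# Usage sketch (client and attribute names are assumptions): the generated
# management client attaches this operation group as an attribute, so a call
# would look roughly like:
#
#   connection = await client.private_endpoint_connections.get(
#       resource_group_name="my-rg",
#       workspace_name="my-workspace",
#       private_endpoint_connection_name="my-connection",
#   )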
async def _create_initial(
self,
resource_group_name: str,
workspace_name: str,
private_endpoint_connection_name: str,
**kwargs
) -> "models.PrivateEndpointConnection":
cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnection"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01-preview"
# Construct URL
url = self._create_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
request = self._client.put(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorContract, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
async def begin_create(
self,
resource_group_name: str,
workspace_name: str,
private_endpoint_connection_name: str,
**kwargs
) -> AsyncLROPoller["models.PrivateEndpointConnection"]:
"""Approve or reject a private endpoint connection.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param private_endpoint_connection_name: The name of the private endpoint connection.
:type private_endpoint_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either PrivateEndpointConnection or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~synapse_management_client.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnection"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_initial(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
private_endpoint_connection_name=private_endpoint_connection_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
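# Usage sketch (names are assumptions): `begin_create` returns an AsyncLROPoller
# right after the initial PUT; awaiting `.result()` drives AsyncARMPolling until
# the service reports a terminal state and yields the deserialized connection.
#
#   poller = await client.private_endpoint_connections.begin_create(
#       resource_group_name="my-rg",
#       workspace_name="my-workspace",
#       private_endpoint_connection_name="my-connection",
#   )
#   connection = await poller.result()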
async def _delete_initial(
self,
resource_group_name: str,
workspace_name: str,
private_endpoint_connection_name: str,
**kwargs
) -> Optional["models.OperationResource"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.OperationResource"]]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01-preview"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorContract, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 202:
deserialized = self._deserialize('OperationResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
workspace_name: str,
private_endpoint_connection_name: str,
**kwargs
) -> AsyncLROPoller["models.OperationResource"]:
"""Delete a private endpoint connection.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param private_endpoint_connection_name: The name of the private endpoint connection.
:type private_endpoint_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either OperationResource or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~synapse_management_client.models.OperationResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.OperationResource"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
private_endpoint_connection_name=private_endpoint_connection_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('OperationResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
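# Usage sketch (names are assumptions): the `continuation_token` keyword above
# rehydrates a poller that was suspended earlier, skipping the initial DELETE.
#
#   poller = await client.private_endpoint_connections.begin_delete(
#       resource_group_name="my-rg",
#       workspace_name="my-workspace",
#       private_endpoint_connection_name="my-connection",
#   )
#   token = poller.continuation_token()
#   # later, possibly in another process:
#   resumed = await client.private_endpoint_connections.begin_delete(
#       resource_group_name="my-rg",
#       workspace_name="my-workspace",
#       private_endpoint_connection_name="my-connection",
#       continuation_token=token,
#   )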
def list(
self,
resource_group_name: str,
workspace_name: str,
**kwargs
) -> AsyncIterable["models.PrivateEndpointConnectionList"]:
"""Lists private endpoint connection in workspace.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either PrivateEndpointConnectionList or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~synapse_management_client.models.PrivateEndpointConnectionList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnectionList"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01-preview"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('PrivateEndpointConnectionList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.ErrorContract, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/privateEndpointConnections'}  # type: ignore
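# Usage sketch (names are assumptions): `list` returns an AsyncItemPaged; an
# `async for` loop walks the items and transparently follows each `nextLink`.
#
#   async for connection in client.private_endpoint_connections.list(
#       resource_group_name="my-rg",
#       workspace_name="my-workspace",
#   ):
#       print(connection.name)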
mod.rs
mod statements;
mod expressions;
mod debug;
pub mod analysis;
pub use self::statements::*;
pub use self::expressions::*;
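// The glob re-exports above flatten the module tree: downstream code can refer
// to items from `statements` and `expressions` at this module's root instead of
// spelling out the submodule paths.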
ops.rs
#![feature(test)]
extern crate test;
extern crate eve;
use eve::ops::*;
use eve::solver::{Solver};
use eve::compiler::{parse_string};
use eve::indexes::{DistinctIter};
use test::Bencher;
#[bench]
pub fn round_holder_compute_output_rounds_bench(b:&mut Bencher) {
let mut holder = OutputRounds::new();
let rounds = vec![1,-1,0,0,1,0,-1];
holder.output_rounds = vec![(3,1), (5,1)];
b.iter(|| {
let iter = DistinctIter::new(&rounds);
holder.compute_output_rounds(iter);
});
}
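// Note on the harness: `b.iter` times the closure over many iterations, so the
// measured cost includes constructing a fresh `DistinctIter` on every pass as
// well as `compute_output_rounds` itself.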
fn test_solver(b: &mut Bencher, code: &str, setup:&str) {
let mut program = Program::new();
let to_test = parse_string(&mut program.state.interner, code, "test", false).pop().unwrap();
let solver = Solver::new(&mut program.state.interner, 0, 0, None, &to_test.constraints);
program.block_info.blocks.push(to_test);
let mut blocks = vec![];
blocks.extend(parse_string(&mut program.state.interner, setup, "test", false));
let mut txn = CodeTransaction::new();
txn.exec(&mut program, blocks, vec![]);
let mut pool = EstimateIterPool::new();
let mut frame = Frame::new();
frame.input = Some(Change {e:0, a:0, v:0, n:0, round:0, count:1, transaction:0});
frame.block_ix = 0;
b.iter(|| {
solver.run(&mut program.state, &mut pool, &mut frame)
});
}
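// Flow of the helper above: parse the block under test and build a `Solver`
// from its constraints, execute the `setup` code through a `CodeTransaction`
// so the program's indexes are populated, then benchmark `solver.run` against
// a single seeded input `Change`.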
#[bench]
pub fn ops_bind_solver(b:&mut Bencher) {
test_solver(b, r#"
search
f = [#foo bar baz]
[#meep foo:f woah]
bind
f.zomg += 3
end
"#, r#"
commit
foo = [#foo bar: 3 baz: 4]
[#meep foo woah:"yeah"]
end
"#);
}
#[bench]
fn bench_simple_gj(b:&mut Bencher) {
// prog.block("simple block", ({find, record, lib}) => {
// let person = find("person");
// let text = `name: ${person.name}`;
// return [
// record("html/div", {person, text})
// ]
// });
//
// let mut program = Program::new();
// let constraints = vec![
// make_scan(register(0), program.state.interner.string("tag"), program.state.interner.string("person")),
// make_scan(register(0), program.state.interner.string("name"), register(1)),
// make_function("concat", vec![program.state.interner.string("name: "), register(1)], register(2)),
// make_function("gen_id", vec![register(0), register(2)], register(3)),
// // Constraint::Insert {e: program.state.interner.string("foo"), a: program.state.interner.string("tag"), v: program.state.interner.string("html/div")},
// // Constraint::Insert {e: program.state.interner.string("foo"), a: program.state.interner.string("person"), v: register(0)},
// // Constraint::Insert {e: program.state.interner.string("foo"), a: program.state.interner.string("text"), v: register(1)},
// Constraint::Insert {e: register(3), a: program.state.interner.string("tag"), v: program.state.interner.string("html/div"), commit: false},
// Constraint::Insert {e: register(3), a: program.state.interner.string("person"), v: register(0), commit: false},
// Constraint::Insert {e: register(3), a: program.state.interner.string("text"), v: register(2), commit: false},
// ];
// program.register_block(Block::new("simple_block", constraints));
// let mut ix = 0;
// let mut txn = Transaction::new();
// b.iter(|| {
// txn.clear();
// txn.input(program.state.interner.number_id(ix as f32), program.state.interner.string_id("tag"), program.state.interner.string_id("person"), 1);
// txn.input(program.state.interner.number_id(ix as f32), program.state.interner.string_id("name"), program.state.interner.number_id(ix as f32), 1);
// txn.exec(&mut program);
// ix += 1;
// });
// println!("Size: {:?}", program.index.size);
}
_models.py
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import msrest.serialization
class AcsChatEventBaseProperties(msrest.serialization.Model):
"""Schema of common properties of all chat events.
:param recipient_communication_identifier: The communication identifier of the target user.
:type recipient_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param transaction_id: The transaction id will be used as the correlation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
"""
_attribute_map = {
'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AcsChatEventBaseProperties, self).__init__(**kwargs)
self.recipient_communication_identifier = kwargs.get('recipient_communication_identifier', None)
self.transaction_id = kwargs.get('transaction_id', None)
self.thread_id = kwargs.get('thread_id', None)
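# Construction sketch (values are hypothetical): these generated models take
# keyword arguments, and msrest serializes them through each class's
# `_attribute_map`, which pairs a Python attribute with its wire (camelCase)
# key and msrest type code.
#
#   props = AcsChatEventBaseProperties(
#       thread_id="19:abc123@thread.v2",
#       transaction_id="txn-001",
#   )
#   props.serialize()  # {'threadId': '19:abc123@thread.v2', 'transactionId': 'txn-001'}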
class AcsChatEventInThreadBaseProperties(msrest.serialization.Model):
"""Schema of common properties of all thread-level chat events.
:param transaction_id: The transaction id will be used as the correlation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
"""
_attribute_map = {
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AcsChatEventInThreadBaseProperties, self).__init__(**kwargs)
self.transaction_id = kwargs.get('transaction_id', None)
self.thread_id = kwargs.get('thread_id', None)
class AcsChatMessageEventBaseProperties(AcsChatEventBaseProperties):
"""Schema of common properties of all chat message events.
:param recipient_communication_identifier: The communication identifier of the target user.
:type recipient_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param transaction_id: The transaction id will be used as the correlation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param message_id: The chat message id.
:type message_id: str
:param sender_communication_identifier: The communication identifier of the sender.
:type sender_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param sender_display_name: The display name of the sender.
:type sender_display_name: str
:param compose_time: The original compose time of the message.
:type compose_time: ~datetime.datetime
:param type: The type of the message.
:type type: str
:param version: The version of the message.
:type version: long
"""
_attribute_map = {
'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'message_id': {'key': 'messageId', 'type': 'str'},
'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
super(AcsChatMessageEventBaseProperties, self).__init__(**kwargs)
self.message_id = kwargs.get('message_id', None)
self.sender_communication_identifier = kwargs.get('sender_communication_identifier', None)
self.sender_display_name = kwargs.get('sender_display_name', None)
self.compose_time = kwargs.get('compose_time', None)
self.type = kwargs.get('type', None)
self.version = kwargs.get('version', None)
class AcsChatMessageDeletedEventData(AcsChatMessageEventBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatMessageDeleted event.
:param recipient_communication_identifier: The communication identifier of the target user.
:type recipient_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param transaction_id: The transaction id will be used as the correlation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param message_id: The chat message id.
:type message_id: str
:param sender_communication_identifier: The communication identifier of the sender.
:type sender_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param sender_display_name: The display name of the sender.
:type sender_display_name: str
:param compose_time: The original compose time of the message.
:type compose_time: ~datetime.datetime
:param type: The type of the message.
:type type: str
:param version: The version of the message.
:type version: long
:param delete_time: The time at which the message was deleted.
:type delete_time: ~datetime.datetime
"""
_attribute_map = {
'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'message_id': {'key': 'messageId', 'type': 'str'},
'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'long'},
'delete_time': {'key': 'deleteTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
super(AcsChatMessageDeletedEventData, self).__init__(**kwargs)
self.delete_time = kwargs.get('delete_time', None)
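# Note: each subclass restates the complete parent `_attribute_map` rather than
# only its new keys, because msrest resolves the map on the concrete class; an
# inherited map would omit the fields added at this level.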
class AcsChatMessageEventInThreadBaseProperties(AcsChatEventInThreadBaseProperties):
"""Schema of common properties of all thread-level chat message events.
:param transaction_id: The transaction id will be used as the correlation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param message_id: The chat message id.
:type message_id: str
:param sender_communication_identifier: The communication identifier of the sender.
:type sender_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param sender_display_name: The display name of the sender.
:type sender_display_name: str
:param compose_time: The original compose time of the message.
:type compose_time: ~datetime.datetime
:param type: The type of the message.
:type type: str
:param version: The version of the message.
:type version: long
"""
_attribute_map = {
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'message_id': {'key': 'messageId', 'type': 'str'},
'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
super(AcsChatMessageEventInThreadBaseProperties, self).__init__(**kwargs)
self.message_id = kwargs.get('message_id', None)
self.sender_communication_identifier = kwargs.get('sender_communication_identifier', None)
self.sender_display_name = kwargs.get('sender_display_name', None)
self.compose_time = kwargs.get('compose_time', None)
self.type = kwargs.get('type', None)
self.version = kwargs.get('version', None)
class AcsChatMessageDeletedInThreadEventData(AcsChatMessageEventInThreadBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatMessageDeletedInThread event.
:param transaction_id: The transaction id will be used as the correlation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param message_id: The chat message id.
:type message_id: str
:param sender_communication_identifier: The communication identifier of the sender.
:type sender_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param sender_display_name: The display name of the sender.
:type sender_display_name: str
:param compose_time: The original compose time of the message.
:type compose_time: ~datetime.datetime
:param type: The type of the message.
:type type: str
:param version: The version of the message.
:type version: long
:param delete_time: The time at which the message was deleted.
:type delete_time: ~datetime.datetime
"""
_attribute_map = {
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'message_id': {'key': 'messageId', 'type': 'str'},
'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'long'},
'delete_time': {'key': 'deleteTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
super(AcsChatMessageDeletedInThreadEventData, self).__init__(**kwargs)
self.delete_time = kwargs.get('delete_time', None)
class AcsChatMessageEditedEventData(AcsChatMessageEventBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatMessageEdited event.
:param recipient_communication_identifier: The communication identifier of the target user.
:type recipient_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param transaction_id: The transaction id will be used as the correlation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param message_id: The chat message id.
:type message_id: str
:param sender_communication_identifier: The communication identifier of the sender.
:type sender_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param sender_display_name: The display name of the sender.
:type sender_display_name: str
:param compose_time: The original compose time of the message.
:type compose_time: ~datetime.datetime
:param type: The type of the message.
:type type: str
:param version: The version of the message.
:type version: long
:param message_body: The body of the chat message.
:type message_body: str
:param edit_time: The time at which the message was edited.
:type edit_time: ~datetime.datetime
"""
_attribute_map = {
'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'message_id': {'key': 'messageId', 'type': 'str'},
'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'long'},
'message_body': {'key': 'messageBody', 'type': 'str'},
'edit_time': {'key': 'editTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
super(AcsChatMessageEditedEventData, self).__init__(**kwargs)
self.message_body = kwargs.get('message_body', None)
self.edit_time = kwargs.get('edit_time', None)
class AcsChatMessageEditedInThreadEventData(AcsChatMessageEventInThreadBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatMessageEditedInThread event.
:param transaction_id: The transaction id will be used as the correlation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param message_id: The chat message id.
:type message_id: str
:param sender_communication_identifier: The communication identifier of the sender.
:type sender_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param sender_display_name: The display name of the sender.
:type sender_display_name: str
:param compose_time: The original compose time of the message.
:type compose_time: ~datetime.datetime
:param type: The type of the message.
:type type: str
:param version: The version of the message.
:type version: long
:param message_body: The body of the chat message.
:type message_body: str
:param edit_time: The time at which the message was edited.
:type edit_time: ~datetime.datetime
"""
_attribute_map = {
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'message_id': {'key': 'messageId', 'type': 'str'},
'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'long'},
'message_body': {'key': 'messageBody', 'type': 'str'},
'edit_time': {'key': 'editTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
super(AcsChatMessageEditedInThreadEventData, self).__init__(**kwargs)
self.message_body = kwargs.get('message_body', None)
self.edit_time = kwargs.get('edit_time', None)
class AcsChatMessageReceivedEventData(AcsChatMessageEventBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatMessageReceived event.
:param recipient_communication_identifier: The communication identifier of the target user.
:type recipient_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param transaction_id: The transaction id will be used as the correlation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param message_id: The chat message id.
:type message_id: str
:param sender_communication_identifier: The communication identifier of the sender.
:type sender_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param sender_display_name: The display name of the sender.
:type sender_display_name: str
:param compose_time: The original compose time of the message.
:type compose_time: ~datetime.datetime
:param type: The type of the message.
:type type: str
:param version: The version of the message.
:type version: long
:param message_body: The body of the chat message.
:type message_body: str
"""
_attribute_map = {
'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'message_id': {'key': 'messageId', 'type': 'str'},
'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'long'},
'message_body': {'key': 'messageBody', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AcsChatMessageReceivedEventData, self).__init__(**kwargs)
self.message_body = kwargs.get('message_body', None)
class AcsChatMessageReceivedInThreadEventData(AcsChatMessageEventInThreadBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatMessageReceivedInThread event.
:param transaction_id: The transaction id will be used as the correlation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param message_id: The chat message id.
:type message_id: str
:param sender_communication_identifier: The communication identifier of the sender.
:type sender_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param sender_display_name: The display name of the sender.
:type sender_display_name: str
:param compose_time: The original compose time of the message.
:type compose_time: ~datetime.datetime
:param type: The type of the message.
:type type: str
:param version: The version of the message.
:type version: long
:param message_body: The body of the chat message.
:type message_body: str
"""
_attribute_map = {
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'message_id': {'key': 'messageId', 'type': 'str'},
'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'long'},
'message_body': {'key': 'messageBody', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AcsChatMessageReceivedInThreadEventData, self).__init__(**kwargs)
self.message_body = kwargs.get('message_body', None)
class AcsChatParticipantAddedToThreadEventData(AcsChatEventInThreadBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatThreadParticipantAdded event.
:param transaction_id: The transaction id will be used as the correlation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param time: The time at which the user was added to the thread.
:type time: ~datetime.datetime
:param added_by_communication_identifier: The communication identifier of the user who added
the user.
:type added_by_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param participant_added: The details of the user who was added.
:type participant_added: ~event_grid_publisher_client.models.AcsChatThreadParticipantProperties
:param version: The version of the thread.
:type version: long
"""
_attribute_map = {
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'time': {'key': 'time', 'type': 'iso-8601'},
'added_by_communication_identifier': {'key': 'addedByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'participant_added': {'key': 'participantAdded', 'type': 'AcsChatThreadParticipantProperties'},
'version': {'key': 'version', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
super(AcsChatParticipantAddedToThreadEventData, self).__init__(**kwargs)
self.time = kwargs.get('time', None)
self.added_by_communication_identifier = kwargs.get('added_by_communication_identifier', None)
self.participant_added = kwargs.get('participant_added', None)
self.version = kwargs.get('version', None)
class AcsChatThreadEventBaseProperties(AcsChatEventBaseProperties):
"""Schema of common properties of all chat thread events.
:param recipient_communication_identifier: The communication identifier of the target user.
:type recipient_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param transaction_id: The transaction id will be used as the correlation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param create_time: The original creation time of the thread.
:type create_time: ~datetime.datetime
:param version: The version of the thread.
:type version: long
"""
_attribute_map = {
'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'create_time': {'key': 'createTime', 'type': 'iso-8601'},
'version': {'key': 'version', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
super(AcsChatThreadEventBaseProperties, self).__init__(**kwargs)
self.create_time = kwargs.get('create_time', None)
self.version = kwargs.get('version', None)
class AcsChatParticipantAddedToThreadWithUserEventData(AcsChatThreadEventBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatParticipantAddedToThreadWithUser event.
:param recipient_communication_identifier: The communication identifier of the target user.
:type recipient_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param transaction_id: The transaction id will be used as the correlation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param create_time: The original creation time of the thread.
:type create_time: ~datetime.datetime
:param version: The version of the thread.
:type version: long
:param time: The time at which the user was added to the thread.
:type time: ~datetime.datetime
:param added_by_communication_identifier: The communication identifier of the user who added
the user.
:type added_by_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param participant_added: The details of the user who was added.
:type participant_added: ~event_grid_publisher_client.models.AcsChatThreadParticipantProperties
"""
_attribute_map = {
'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'create_time': {'key': 'createTime', 'type': 'iso-8601'},
'version': {'key': 'version', 'type': 'long'},
'time': {'key': 'time', 'type': 'iso-8601'},
'added_by_communication_identifier': {'key': 'addedByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'participant_added': {'key': 'participantAdded', 'type': 'AcsChatThreadParticipantProperties'},
}
def __init__(
self,
**kwargs
):
super(AcsChatParticipantAddedToThreadWithUserEventData, self).__init__(**kwargs)
self.time = kwargs.get('time', None)
self.added_by_communication_identifier = kwargs.get('added_by_communication_identifier', None)
self.participant_added = kwargs.get('participant_added', None)
class AcsChatParticipantRemovedFromThreadEventData(AcsChatEventInThreadBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatThreadParticipantRemoved event.
:param transaction_id: The transaction id will be used as the correlation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param time: The time at which the user was removed from the thread.
:type time: ~datetime.datetime
:param removed_by_communication_identifier: The communication identifier of the user who
removed the user.
:type removed_by_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param participant_removed: The details of the user who was removed.
:type participant_removed:
~event_grid_publisher_client.models.AcsChatThreadParticipantProperties
:param version: The version of the thread.
:type version: long
"""
_attribute_map = {
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'time': {'key': 'time', 'type': 'iso-8601'},
'removed_by_communication_identifier': {'key': 'removedByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'participant_removed': {'key': 'participantRemoved', 'type': 'AcsChatThreadParticipantProperties'},
'version': {'key': 'version', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
super(AcsChatParticipantRemovedFromThreadEventData, self).__init__(**kwargs)
self.time = kwargs.get('time', None)
self.removed_by_communication_identifier = kwargs.get('removed_by_communication_identifier', None)
self.participant_removed = kwargs.get('participant_removed', None)
self.version = kwargs.get('version', None)
class AcsChatParticipantRemovedFromThreadWithUserEventData(AcsChatThreadEventBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatParticipantRemovedFromThreadWithUser event.
:param recipient_communication_identifier: The communication identifier of the target user.
:type recipient_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param transaction_id: The transaction id will be used as the correlation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param create_time: The original creation time of the thread.
:type create_time: ~datetime.datetime
:param version: The version of the thread.
:type version: long
:param time: The time at which the user was removed from the thread.
:type time: ~datetime.datetime
:param removed_by_communication_identifier: The communication identifier of the user who
removed the user.
:type removed_by_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param participant_removed: The details of the user who was removed.
:type participant_removed:
~event_grid_publisher_client.models.AcsChatThreadParticipantProperties
"""
_attribute_map = {
'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'create_time': {'key': 'createTime', 'type': 'iso-8601'},
'version': {'key': 'version', 'type': 'long'},
'time': {'key': 'time', 'type': 'iso-8601'},
'removed_by_communication_identifier': {'key': 'removedByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'participant_removed': {'key': 'participantRemoved', 'type': 'AcsChatThreadParticipantProperties'},
}
def __init__(
self,
**kwargs
):
super(AcsChatParticipantRemovedFromThreadWithUserEventData, self).__init__(**kwargs)
self.time = kwargs.get('time', None)
self.removed_by_communication_identifier = kwargs.get('removed_by_communication_identifier', None)
self.participant_removed = kwargs.get('participant_removed', None)
class AcsChatThreadEventInThreadBaseProperties(AcsChatEventInThreadBaseProperties):
"""Schema of common properties of all chat thread events.
:param transaction_id: The transaction id will be used as co-relation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param create_time: The original creation time of the thread.
:type create_time: ~datetime.datetime
:param version: The version of the thread.
:type version: long
"""
_attribute_map = {
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'create_time': {'key': 'createTime', 'type': 'iso-8601'},
'version': {'key': 'version', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
super(AcsChatThreadEventInThreadBaseProperties, self).__init__(**kwargs)
self.create_time = kwargs.get('create_time', None)
self.version = kwargs.get('version', None)
class AcsChatThreadCreatedEventData(AcsChatThreadEventInThreadBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatThreadCreated event.
:param transaction_id: The transaction id will be used as the correlation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param create_time: The original creation time of the thread.
:type create_time: ~datetime.datetime
:param version: The version of the thread.
:type version: long
:param created_by_communication_identifier: The communication identifier of the user who
created the thread.
:type created_by_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param properties: The thread properties.
:type properties: dict[str, object]
:param participants: The list of properties of participants who are part of the thread.
:type participants:
list[~event_grid_publisher_client.models.AcsChatThreadParticipantProperties]
"""
_attribute_map = {
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'create_time': {'key': 'createTime', 'type': 'iso-8601'},
'version': {'key': 'version', 'type': 'long'},
'created_by_communication_identifier': {'key': 'createdByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'properties': {'key': 'properties', 'type': '{object}'},
'participants': {'key': 'participants', 'type': '[AcsChatThreadParticipantProperties]'},
}
def __init__(
self,
**kwargs
):
super(AcsChatThreadCreatedEventData, self).__init__(**kwargs)
self.created_by_communication_identifier = kwargs.get('created_by_communication_identifier', None)
self.properties = kwargs.get('properties', None)
self.participants = kwargs.get('participants', None)
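# The msrest type codes used above: 'str' and 'long' map to Python str and int,
# 'iso-8601' round-trips datetimes, '{object}' is a dict of str to arbitrary
# values, and '[AcsChatThreadParticipantProperties]' is a list of that nested
# model.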
class AcsChatThreadCreatedWithUserEventData(AcsChatThreadEventBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatThreadCreatedWithUser event.
:param recipient_communication_identifier: The communication identifier of the target user.
:type recipient_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param transaction_id: The transaction id will be used as the correlation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param create_time: The original creation time of the thread.
:type create_time: ~datetime.datetime
:param version: The version of the thread.
:type version: long
:param created_by_communication_identifier: The communication identifier of the user who
created the thread.
:type created_by_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param properties: The thread properties.
:type properties: dict[str, object]
:param participants: The list of properties of participants who are part of the thread.
:type participants:
list[~event_grid_publisher_client.models.AcsChatThreadParticipantProperties]
"""
_attribute_map = {
'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'create_time': {'key': 'createTime', 'type': 'iso-8601'},
'version': {'key': 'version', 'type': 'long'},
'created_by_communication_identifier': {'key': 'createdByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'properties': {'key': 'properties', 'type': '{object}'},
'participants': {'key': 'participants', 'type': '[AcsChatThreadParticipantProperties]'},
}
def __init__(
self,
**kwargs
):
super(AcsChatThreadCreatedWithUserEventData, self).__init__(**kwargs)
self.created_by_communication_identifier = kwargs.get('created_by_communication_identifier', None)
self.properties = kwargs.get('properties', None)
self.participants = kwargs.get('participants', None)
class AcsChatThreadDeletedEventData(AcsChatThreadEventInThreadBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatThreadDeleted event.
:param transaction_id: The transaction id will be used as the correlation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param create_time: The original creation time of the thread.
:type create_time: ~datetime.datetime
:param version: The version of the thread.
:type version: long
:param deleted_by_communication_identifier: The communication identifier of the user who
deleted the thread.
:type deleted_by_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param delete_time: The deletion time of the thread.
:type delete_time: ~datetime.datetime
"""
_attribute_map = {
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'create_time': {'key': 'createTime', 'type': 'iso-8601'},
'version': {'key': 'version', 'type': 'long'},
'deleted_by_communication_identifier': {'key': 'deletedByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'delete_time': {'key': 'deleteTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
super(AcsChatThreadDeletedEventData, self).__init__(**kwargs)
self.deleted_by_communication_identifier = kwargs.get('deleted_by_communication_identifier', None)
self.delete_time = kwargs.get('delete_time', None)
class AcsChatThreadParticipantProperties(msrest.serialization.Model):
"""Schema of the chat thread participant.
:param display_name: The name of the user.
:type display_name: str
:param participant_communication_identifier: The communication identifier of the user.
:type participant_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'participant_communication_identifier': {'key': 'participantCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
}
def __init__(
self,
**kwargs
):
super(AcsChatThreadParticipantProperties, self).__init__(**kwargs)
self.display_name = kwargs.get('display_name', None)
self.participant_communication_identifier = kwargs.get('participant_communication_identifier', None)
class AcsChatThreadPropertiesUpdatedEventData(AcsChatThreadEventInThreadBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatThreadPropertiesUpdated event.
:param transaction_id: The transaction id will be used as the correlation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param create_time: The original creation time of the thread.
:type create_time: ~datetime.datetime
:param version: The version of the thread.
:type version: long
:param edited_by_communication_identifier: The communication identifier of the user who updated
the thread properties.
:type edited_by_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param edit_time: The time at which the properties of the thread were updated.
:type edit_time: ~datetime.datetime
:param properties: The updated thread properties.
:type properties: dict[str, object]
"""
_attribute_map = {
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'create_time': {'key': 'createTime', 'type': 'iso-8601'},
'version': {'key': 'version', 'type': 'long'},
'edited_by_communication_identifier': {'key': 'editedByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'edit_time': {'key': 'editTime', 'type': 'iso-8601'},
'properties': {'key': 'properties', 'type': '{object}'},
}
def __init__(
self,
**kwargs
):
super(AcsChatThreadPropertiesUpdatedEventData, self).__init__(**kwargs)
self.edited_by_communication_identifier = kwargs.get('edited_by_communication_identifier', None)
self.edit_time = kwargs.get('edit_time', None)
self.properties = kwargs.get('properties', None)
class AcsChatThreadPropertiesUpdatedPerUserEventData(AcsChatThreadEventBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatThreadPropertiesUpdatedPerUser event.
:param recipient_communication_identifier: The communication identifier of the target user.
:type recipient_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param transaction_id: The transaction id will be used as the correlation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param create_time: The original creation time of the thread.
:type create_time: ~datetime.datetime
:param version: The version of the thread.
:type version: long
:param edited_by_communication_identifier: The communication identifier of the user who updated
the thread properties.
:type edited_by_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param edit_time: The time at which the properties of the thread were updated.
:type edit_time: ~datetime.datetime
:param properties: The updated thread properties.
:type properties: dict[str, object]
"""
_attribute_map = {
'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'create_time': {'key': 'createTime', 'type': 'iso-8601'},
'version': {'key': 'version', 'type': 'long'},
'edited_by_communication_identifier': {'key': 'editedByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'edit_time': {'key': 'editTime', 'type': 'iso-8601'},
'properties': {'key': 'properties', 'type': '{object}'},
}
def __init__(
self,
**kwargs
):
super(AcsChatThreadPropertiesUpdatedPerUserEventData, self).__init__(**kwargs)
self.edited_by_communication_identifier = kwargs.get('edited_by_communication_identifier', None)
self.edit_time = kwargs.get('edit_time', None)
self.properties = kwargs.get('properties', None)
class AcsChatThreadWithUserDeletedEventData(AcsChatThreadEventBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatThreadWithUserDeleted event.
:param recipient_communication_identifier: The communication identifier of the target user.
:type recipient_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param transaction_id: The transaction id will be used as the correlation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param create_time: The original creation time of the thread.
:type create_time: ~datetime.datetime
:param version: The version of the thread.
:type version: long
:param deleted_by_communication_identifier: The communication identifier of the user who
deleted the thread.
:type deleted_by_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param delete_time: The deletion time of the thread.
:type delete_time: ~datetime.datetime
"""
_attribute_map = {
'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'create_time': {'key': 'createTime', 'type': 'iso-8601'},
'version': {'key': 'version', 'type': 'long'},
'deleted_by_communication_identifier': {'key': 'deletedByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'delete_time': {'key': 'deleteTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
super(AcsChatThreadWithUserDeletedEventData, self).__init__(**kwargs)
self.deleted_by_communication_identifier = kwargs.get('deleted_by_communication_identifier', None)
self.delete_time = kwargs.get('delete_time', None)
class AcsRecordingChunkInfoProperties(msrest.serialization.Model):
"""Schema for all properties of Recording Chunk Information.
:param document_id: The documentId of the recording chunk.
:type document_id: str
:param index: The index of the recording chunk.
:type index: long
:param end_reason: The reason for ending the recording chunk.
:type end_reason: str
"""
_attribute_map = {
'document_id': {'key': 'documentId', 'type': 'str'},
'index': {'key': 'index', 'type': 'long'},
'end_reason': {'key': 'endReason', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AcsRecordingChunkInfoProperties, self).__init__(**kwargs)
self.document_id = kwargs.get('document_id', None)
self.index = kwargs.get('index', None)
self.end_reason = kwargs.get('end_reason', None)
class AcsRecordingFileStatusUpdatedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.RecordingFileStatusUpdated event.
:param recording_storage_info: The details of recording storage information.
:type recording_storage_info:
~event_grid_publisher_client.models.AcsRecordingStorageInfoProperties
:param recording_start_time: The time at which the recording started.
:type recording_start_time: ~datetime.datetime
:param recording_duration_ms: The recording duration in milliseconds.
:type recording_duration_ms: long
:param session_end_reason: The reason for ending the recording session.
:type session_end_reason: str
"""
_attribute_map = {
'recording_storage_info': {'key': 'recordingStorageInfo', 'type': 'AcsRecordingStorageInfoProperties'},
'recording_start_time': {'key': 'recordingStartTime', 'type': 'iso-8601'},
'recording_duration_ms': {'key': 'recordingDurationMs', 'type': 'long'},
'session_end_reason': {'key': 'sessionEndReason', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AcsRecordingFileStatusUpdatedEventData, self).__init__(**kwargs)
self.recording_storage_info = kwargs.get('recording_storage_info', None)
self.recording_start_time = kwargs.get('recording_start_time', None)
self.recording_duration_ms = kwargs.get('recording_duration_ms', None)
self.session_end_reason = kwargs.get('session_end_reason', None)
class AcsRecordingStorageInfoProperties(msrest.serialization.Model):
"""Schema for all properties of Recording Storage Information.
:param recording_chunks: List of details of recording chunks information.
:type recording_chunks:
list[~event_grid_publisher_client.models.AcsRecordingChunkInfoProperties]
"""
_attribute_map = {
'recording_chunks': {'key': 'recordingChunks', 'type': '[AcsRecordingChunkInfoProperties]'},
}
def __init__(
self,
**kwargs
):
super(AcsRecordingStorageInfoProperties, self).__init__(**kwargs)
self.recording_chunks = kwargs.get('recording_chunks', None)
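# Illustrative sketch (not part of the generated model surface): how the
# recording models nest. AcsRecordingFileStatusUpdatedEventData holds an
# AcsRecordingStorageInfoProperties, which in turn carries a list of
# AcsRecordingChunkInfoProperties. All class names come from this module;
# the field values below are invented for the example.
def _example_recording_file_status_event():
    chunks = [
        AcsRecordingChunkInfoProperties(document_id='doc-0', index=0, end_reason='SessionEnded'),
    ]
    storage_info = AcsRecordingStorageInfoProperties(recording_chunks=chunks)
    return AcsRecordingFileStatusUpdatedEventData(
        recording_storage_info=storage_info,
        recording_duration_ms=120000,
    )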
class AcsSmsDeliveryAttemptProperties(msrest.serialization.Model):
"""Schema for details of a delivery attempt.
:param timestamp: Timestamp at which delivery was attempted.
:type timestamp: ~datetime.datetime
:param segments_succeeded: Number of segments that were successfully delivered.
:type segments_succeeded: int
:param segments_failed: Number of segments whose delivery failed.
:type segments_failed: int
"""
_attribute_map = {
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'segments_succeeded': {'key': 'segmentsSucceeded', 'type': 'int'},
'segments_failed': {'key': 'segmentsFailed', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(AcsSmsDeliveryAttemptProperties, self).__init__(**kwargs)
self.timestamp = kwargs.get('timestamp', None)
self.segments_succeeded = kwargs.get('segments_succeeded', None)
self.segments_failed = kwargs.get('segments_failed', None)
class AcsSmsEventBaseProperties(msrest.serialization.Model):
"""Schema of common properties of all SMS events.
:param message_id: The identity of the SMS message.
:type message_id: str
:param from_property: The identity of the SMS message sender.
:type from_property: str
:param to: The identity of the SMS message receiver.
:type to: str
"""
_attribute_map = {
'message_id': {'key': 'messageId', 'type': 'str'},
'from_property': {'key': 'from', 'type': 'str'},
'to': {'key': 'to', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AcsSmsEventBaseProperties, self).__init__(**kwargs)
self.message_id = kwargs.get('message_id', None)
self.from_property = kwargs.get('from_property', None)
self.to = kwargs.get('to', None)
class AcsSmsDeliveryReportReceivedEventData(AcsSmsEventBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.SMSDeliveryReportReceived event.
:param message_id: The identity of the SMS message.
:type message_id: str
:param from_property: The identity of the SMS message sender.
:type from_property: str
:param to: The identity of the SMS message receiver.
:type to: str
:param delivery_status: The delivery status.
:type delivery_status: str
:param delivery_status_details: Details about the delivery status.
:type delivery_status_details: str
:param delivery_attempts: List of details of delivery attempts made.
:type delivery_attempts:
list[~event_grid_publisher_client.models.AcsSmsDeliveryAttemptProperties]
:param received_timestamp: The time at which the SMS delivery report was received.
:type received_timestamp: ~datetime.datetime
:param tag: Customer content: the custom tag supplied when the SMS was sent.
:type tag: str
"""
_attribute_map = {
'message_id': {'key': 'messageId', 'type': 'str'},
'from_property': {'key': 'from', 'type': 'str'},
'to': {'key': 'to', 'type': 'str'},
'delivery_status': {'key': 'deliveryStatus', 'type': 'str'},
'delivery_status_details': {'key': 'deliveryStatusDetails', 'type': 'str'},
'delivery_attempts': {'key': 'deliveryAttempts', 'type': '[AcsSmsDeliveryAttemptProperties]'},
'received_timestamp': {'key': 'receivedTimestamp', 'type': 'iso-8601'},
'tag': {'key': 'tag', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AcsSmsDeliveryReportReceivedEventData, self).__init__(**kwargs)
self.delivery_status = kwargs.get('delivery_status', None)
self.delivery_status_details = kwargs.get('delivery_status_details', None)
self.delivery_attempts = kwargs.get('delivery_attempts', None)
self.received_timestamp = kwargs.get('received_timestamp', None)
self.tag = kwargs.get('tag', None)
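# Illustrative sketch: tallying segment outcomes across the delivery attempts
# carried by an AcsSmsDeliveryReportReceivedEventData instance (for example,
# one deserialized from an Event Grid payload). Only attributes defined above
# are used.
def _count_sms_segments(delivery_report):
    succeeded = failed = 0
    for attempt in delivery_report.delivery_attempts or []:
        succeeded += attempt.segments_succeeded or 0
        failed += attempt.segments_failed or 0
    return succeeded, failed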
class AcsSmsReceivedEventData(AcsSmsEventBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.SMSReceived event.
:param message_id: The identity of the SMS message.
:type message_id: str
:param from_property: The identity of the SMS message sender.
:type from_property: str
:param to: The identity of the SMS message receiver.
:type to: str
:param message: The SMS content.
:type message: str
:param received_timestamp: The time at which the SMS was received.
:type received_timestamp: ~datetime.datetime
"""
_attribute_map = {
'message_id': {'key': 'messageId', 'type': 'str'},
'from_property': {'key': 'from', 'type': 'str'},
'to': {'key': 'to', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'received_timestamp': {'key': 'receivedTimestamp', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
super(AcsSmsReceivedEventData, self).__init__(**kwargs)
self.message = kwargs.get('message', None)
self.received_timestamp = kwargs.get('received_timestamp', None)
class AppConfigurationKeyValueDeletedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.AppConfiguration.KeyValueDeleted event.
:param key: The key used to identify the key-value that was deleted.
:type key: str
:param label: The label, if any, used to identify the key-value that was deleted.
:type label: str
:param etag: The etag representing the key-value that was deleted.
:type etag: str
:param sync_token: The sync token representing the server state after the event.
:type sync_token: str
"""
_attribute_map = {
'key': {'key': 'key', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'sync_token': {'key': 'syncToken', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AppConfigurationKeyValueDeletedEventData, self).__init__(**kwargs)
self.key = kwargs.get('key', None)
self.label = kwargs.get('label', None)
self.etag = kwargs.get('etag', None)
self.sync_token = kwargs.get('sync_token', None)
class AppConfigurationKeyValueModifiedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.AppConfiguration.KeyValueModified event.
:param key: The key used to identify the key-value that was modified.
:type key: str
:param label: The label, if any, used to identify the key-value that was modified.
:type label: str
:param etag: The etag representing the new state of the key-value.
:type etag: str
:param sync_token: The sync token representing the server state after the event.
:type sync_token: str
"""
_attribute_map = {
'key': {'key': 'key', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'sync_token': {'key': 'syncToken', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AppConfigurationKeyValueModifiedEventData, self).__init__(**kwargs)
self.key = kwargs.get('key', None)
self.label = kwargs.get('label', None)
self.etag = kwargs.get('etag', None)
self.sync_token = kwargs.get('sync_token', None)
class AppEventTypeDetail(msrest.serialization.Model):
"""Detail of action on the app.
:param action: Type of action of the operation. Possible values include: "Restarted",
"Stopped", "ChangedAppSettings", "Started", "Completed", "Failed".
:type action: str or ~event_grid_publisher_client.models.AppAction
"""
_attribute_map = {
'action': {'key': 'action', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AppEventTypeDetail, self).__init__(**kwargs)
self.action = kwargs.get('action', None)
class AppServicePlanEventTypeDetail(msrest.serialization.Model):
"""Detail of action on the app service plan.
:param stamp_kind: Kind of environment in which the app service plan runs. Possible values
include: "Public", "AseV1", "AseV2".
:type stamp_kind: str or ~event_grid_publisher_client.models.StampKind
:param action: Type of action on the app service plan. Possible values include: "Updated".
:type action: str or ~event_grid_publisher_client.models.AppServicePlanAction
:param status: Asynchronous status of the operation on the app service plan. Possible
values include: "Started", "Completed", "Failed".
:type status: str or ~event_grid_publisher_client.models.AsyncStatus
"""
_attribute_map = {
'stamp_kind': {'key': 'stampKind', 'type': 'str'},
'action': {'key': 'action', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AppServicePlanEventTypeDetail, self).__init__(**kwargs)
self.stamp_kind = kwargs.get('stamp_kind', None)
self.action = kwargs.get('action', None)
self.status = kwargs.get('status', None)
class CloudEvent(msrest.serialization.Model):
"""Properties of an event published to an Event Grid topic using the CloudEvent 1.0 Schema.
All required parameters must be populated in order to send to Azure.
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
:param id: Required. An identifier for the event. The combination of id and source must be
unique for each distinct event.
:type id: str
:param source: Required. Identifies the context in which an event happened. The combination of
id and source must be unique for each distinct event.
:type source: str
:param data: Event data specific to the event type.
:type data: object
:param data_base64: Event data specific to the event type, encoded as a base64 string.
:type data_base64: bytearray
:param type: Required. Type of event related to the originating occurrence.
:type type: str
:param time: The time (in UTC) the event was generated, in RFC3339 format.
:type time: ~datetime.datetime
:param specversion: Required. The version of the CloudEvents specification which the event
uses.
:type specversion: str
:param dataschema: Identifies the schema that data adheres to.
:type dataschema: str
:param datacontenttype: Content type of data value.
:type datacontenttype: str
:param subject: This describes the subject of the event in the context of the event producer
(identified by source).
:type subject: str
"""
_validation = {
'id': {'required': True},
'source': {'required': True},
'type': {'required': True},
'specversion': {'required': True},
}
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'id': {'key': 'id', 'type': 'str'},
'source': {'key': 'source', 'type': 'str'},
'data': {'key': 'data', 'type': 'object'},
'data_base64': {'key': 'data_base64', 'type': 'bytearray'},
'type': {'key': 'type', 'type': 'str'},
'time': {'key': 'time', 'type': 'iso-8601'},
'specversion': {'key': 'specversion', 'type': 'str'},
'dataschema': {'key': 'dataschema', 'type': 'str'},
'datacontenttype': {'key': 'datacontenttype', 'type': 'str'},
'subject': {'key': 'subject', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(CloudEvent, self).__init__(**kwargs)
self.additional_properties = kwargs.get('additional_properties', None)
self.id = kwargs['id']
self.source = kwargs['source']
self.data = kwargs.get('data', None)
self.data_base64 = kwargs.get('data_base64', None)
self.type = kwargs['type']
self.time = kwargs.get('time', None)
self.specversion = kwargs['specversion']
self.dataschema = kwargs.get('dataschema', None)
self.datacontenttype = kwargs.get('datacontenttype', None)
self.subject = kwargs.get('subject', None)
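# Illustrative sketch: constructing a CloudEvent. The constructor indexes
# kwargs directly for id, source, type and specversion, so omitting any of
# them raises KeyError rather than yielding a partially populated model.
# serialize() is inherited from msrest.serialization.Model and returns a
# plain dict keyed by the wire names in _attribute_map. Values are invented.
def _example_cloud_event():
    event = CloudEvent(
        id='event-1',
        source='/contoso/items',
        type='Contoso.Items.ItemReceived',
        specversion='1.0',
        data={'itemSku': 'sku-1'},
    )
    return event.serialize()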
class CommunicationIdentifierModel(msrest.serialization.Model):
"""Identifies a participant in Azure Communication services. A participant is, for example, a phone number or an Azure communication user. This model must be interpreted as a union: Apart from rawId, at most one further property may be set.
:param raw_id: Raw Id of the identifier. Optional in requests, required in responses.
:type raw_id: str
:param communication_user: The communication user.
:type communication_user: ~event_grid_publisher_client.models.CommunicationUserIdentifierModel
:param phone_number: The phone number.
:type phone_number: ~event_grid_publisher_client.models.PhoneNumberIdentifierModel
:param microsoft_teams_user: The Microsoft Teams user.
:type microsoft_teams_user:
~event_grid_publisher_client.models.MicrosoftTeamsUserIdentifierModel
"""
_attribute_map = {
'raw_id': {'key': 'rawId', 'type': 'str'},
'communication_user': {'key': 'communicationUser', 'type': 'CommunicationUserIdentifierModel'},
'phone_number': {'key': 'phoneNumber', 'type': 'PhoneNumberIdentifierModel'},
'microsoft_teams_user': {'key': 'microsoftTeamsUser', 'type': 'MicrosoftTeamsUserIdentifierModel'},
}
def __init__(
self,
**kwargs
):
super(CommunicationIdentifierModel, self).__init__(**kwargs)
self.raw_id = kwargs.get('raw_id', None)
self.communication_user = kwargs.get('communication_user', None)
self.phone_number = kwargs.get('phone_number', None)
self.microsoft_teams_user = kwargs.get('microsoft_teams_user', None)
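# Illustrative sketch of the union rule documented above: besides raw_id, at
# most one of communication_user, phone_number or microsoft_teams_user may be
# set. CommunicationUserIdentifierModel is defined just below and resolves at
# call time; the ids are fabricated.
def _example_communication_identifier():
    return CommunicationIdentifierModel(
        raw_id='8:acs:example-user',
        communication_user=CommunicationUserIdentifierModel(id='8:acs:example-user'),
        # phone_number and microsoft_teams_user intentionally left unset.
    )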
class CommunicationUserIdentifierModel(msrest.serialization.Model):
"""A user that got created with an Azure Communication Services resource.
All required parameters must be populated in order to send to Azure.
:param id: Required. The Id of the communication user.
:type id: str
"""
_validation = {
'id': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(CommunicationUserIdentifierModel, self).__init__(**kwargs)
self.id = kwargs['id']
class ContainerRegistryArtifactEventData(msrest.serialization.Model):
"""The content of the event request message.
:param id: The event ID.
:type id: str
:param timestamp: The time at which the event occurred.
:type timestamp: ~datetime.datetime
:param action: The action that encompasses the provided event.
:type action: str
:param target: The target of the event.
:type target: ~event_grid_publisher_client.models.ContainerRegistryArtifactEventTarget
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'action': {'key': 'action', 'type': 'str'},
'target': {'key': 'target', 'type': 'ContainerRegistryArtifactEventTarget'},
}
def __init__(
self,
**kwargs
):
super(ContainerRegistryArtifactEventData, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.timestamp = kwargs.get('timestamp', None)
self.action = kwargs.get('action', None)
self.target = kwargs.get('target', None)
class ContainerRegistryArtifactEventTarget(msrest.serialization.Model):
"""The target of the event.
:param media_type: The MIME type of the artifact.
:type media_type: str
:param size: The size in bytes of the artifact.
:type size: long
:param digest: The digest of the artifact.
:type digest: str
:param repository: The repository name of the artifact.
:type repository: str
:param tag: The tag of the artifact.
:type tag: str
:param name: The name of the artifact.
:type name: str
:param version: The version of the artifact.
:type version: str
"""
_attribute_map = {
'media_type': {'key': 'mediaType', 'type': 'str'},
'size': {'key': 'size', 'type': 'long'},
'digest': {'key': 'digest', 'type': 'str'},
'repository': {'key': 'repository', 'type': 'str'},
'tag': {'key': 'tag', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ContainerRegistryArtifactEventTarget, self).__init__(**kwargs)
self.media_type = kwargs.get('media_type', None)
self.size = kwargs.get('size', None)
self.digest = kwargs.get('digest', None)
self.repository = kwargs.get('repository', None)
self.tag = kwargs.get('tag', None)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
class ContainerRegistryChartDeletedEventData(ContainerRegistryArtifactEventData):
"""Schema of the Data property of an EventGridEvent for a Microsoft.ContainerRegistry.ChartDeleted event.
:param id: The event ID.
:type id: str
:param timestamp: The time at which the event occurred.
:type timestamp: ~datetime.datetime
:param action: The action that encompasses the provided event.
:type action: str
:param target: The target of the event.
:type target: ~event_grid_publisher_client.models.ContainerRegistryArtifactEventTarget
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'action': {'key': 'action', 'type': 'str'},
'target': {'key': 'target', 'type': 'ContainerRegistryArtifactEventTarget'},
}
def __init__(
self,
**kwargs
):
super(ContainerRegistryChartDeletedEventData, self).__init__(**kwargs)
class ContainerRegistryChartPushedEventData(ContainerRegistryArtifactEventData):
"""Schema of the Data property of an EventGridEvent for a Microsoft.ContainerRegistry.ChartPushed event.
:param id: The event ID.
:type id: str
:param timestamp: The time at which the event occurred.
:type timestamp: ~datetime.datetime
:param action: The action that encompasses the provided event.
:type action: str
:param target: The target of the event.
:type target: ~event_grid_publisher_client.models.ContainerRegistryArtifactEventTarget
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'action': {'key': 'action', 'type': 'str'},
'target': {'key': 'target', 'type': 'ContainerRegistryArtifactEventTarget'},
}
def __init__(
self,
**kwargs
):
super(ContainerRegistryChartPushedEventData, self).__init__(**kwargs)
class ContainerRegistryEventActor(msrest.serialization.Model):
"""The agent that initiated the event. For most situations, this could be from the authorization context of the request.
:param name: The subject or username associated with the request context that generated the
event.
:type name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ContainerRegistryEventActor, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
class ContainerRegistryEventData(msrest.serialization.Model):
"""The content of the event request message.
:param id: The event ID.
:type id: str
:param timestamp: The time at which the event occurred.
:type timestamp: ~datetime.datetime
:param action: The action that encompasses the provided event.
:type action: str
:param target: The target of the event.
:type target: ~event_grid_publisher_client.models.ContainerRegistryEventTarget
:param request: The request that generated the event.
:type request: ~event_grid_publisher_client.models.ContainerRegistryEventRequest
:param actor: The agent that initiated the event. For most situations, this could be from the
authorization context of the request.
:type actor: ~event_grid_publisher_client.models.ContainerRegistryEventActor
:param source: The registry node that generated the event. Put differently, while the actor
initiates the event, the source generates it.
:type source: ~event_grid_publisher_client.models.ContainerRegistryEventSource
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'action': {'key': 'action', 'type': 'str'},
'target': {'key': 'target', 'type': 'ContainerRegistryEventTarget'},
'request': {'key': 'request', 'type': 'ContainerRegistryEventRequest'},
'actor': {'key': 'actor', 'type': 'ContainerRegistryEventActor'},
'source': {'key': 'source', 'type': 'ContainerRegistryEventSource'},
}
def __init__(
self,
**kwargs
):
super(ContainerRegistryEventData, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.timestamp = kwargs.get('timestamp', None)
self.action = kwargs.get('action', None)
self.target = kwargs.get('target', None)
self.request = kwargs.get('request', None)
self.actor = kwargs.get('actor', None)
self.source = kwargs.get('source', None)
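# Illustrative sketch of the nested shape of a registry event: the
# target/request/actor/source sub-models are defined later in this module and
# resolve at call time. Field values are invented.
def _example_container_registry_event():
    return ContainerRegistryEventData(
        id='event-1',
        action='push',
        target=ContainerRegistryEventTarget(repository='hello-world', tag='latest'),
        request=ContainerRegistryEventRequest(method='PUT', useragent='docker/20.10'),
        actor=ContainerRegistryEventActor(name='ci-pipeline'),
        source=ContainerRegistryEventSource(addr='registry-node-1:5000'),
    )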
class ContainerRegistryEventRequest(msrest.serialization.Model):
"""The request that generated the event.
:param id: The ID of the request that initiated the event.
:type id: str
:param addr: The IP or hostname and possibly port of the client connection that initiated the
event. This is the RemoteAddr from the standard http request.
:type addr: str
:param host: The externally accessible hostname of the registry instance, as specified by the
http host header on incoming requests.
:type host: str
:param method: The request method that generated the event.
:type method: str
:param useragent: The user agent header of the request.
:type useragent: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'addr': {'key': 'addr', 'type': 'str'},
'host': {'key': 'host', 'type': 'str'},
'method': {'key': 'method', 'type': 'str'},
'useragent': {'key': 'useragent', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ContainerRegistryEventRequest, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.addr = kwargs.get('addr', None)
self.host = kwargs.get('host', None)
self.method = kwargs.get('method', None)
self.useragent = kwargs.get('useragent', None)
class ContainerRegistryEventSource(msrest.serialization.Model):
"""The registry node that generated the event. Put differently, while the actor initiates the event, the source generates it.
:param addr: The IP or hostname and the port of the registry node that generated the event.
Generally, this will be resolved by os.Hostname() along with the running port.
:type addr: str
:param instance_id: The running instance of an application. Changes after each restart.
:type instance_id: str
"""
_attribute_map = {
'addr': {'key': 'addr', 'type': 'str'},
'instance_id': {'key': 'instanceID', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ContainerRegistryEventSource, self).__init__(**kwargs)
self.addr = kwargs.get('addr', None)
self.instance_id = kwargs.get('instance_id', None)
class ContainerRegistryEventTarget(msrest.serialization.Model):
"""The target of the event.
:param media_type: The MIME type of the referenced object.
:type media_type: str
:param size: The number of bytes of the content. Same as Length field.
:type size: long
:param digest: The digest of the content, as defined by the Registry V2 HTTP API Specification.
:type digest: str
:param length: The number of bytes of the content. Same as Size field.
:type length: long
:param repository: The repository name.
:type repository: str
:param url: The direct URL to the content.
:type url: str
:param tag: The tag name.
:type tag: str
"""
_attribute_map = {
'media_type': {'key': 'mediaType', 'type': 'str'},
'size': {'key': 'size', 'type': 'long'},
'digest': {'key': 'digest', 'type': 'str'},
'length': {'key': 'length', 'type': 'long'},
'repository': {'key': 'repository', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'tag': {'key': 'tag', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ContainerRegistryEventTarget, self).__init__(**kwargs)
self.media_type = kwargs.get('media_type', None)
self.size = kwargs.get('size', None)
self.digest = kwargs.get('digest', None)
self.length = kwargs.get('length', None)
self.repository = kwargs.get('repository', None)
self.url = kwargs.get('url', None)
self.tag = kwargs.get('tag', None)
class ContainerRegistryImageDeletedEventData(ContainerRegistryEventData):
"""Schema of the Data property of an EventGridEvent for a Microsoft.ContainerRegistry.ImageDeleted event.
:param id: The event ID.
:type id: str
:param timestamp: The time at which the event occurred.
:type timestamp: ~datetime.datetime
:param action: The action that encompasses the provided event.
:type action: str
:param target: The target of the event.
:type target: ~event_grid_publisher_client.models.ContainerRegistryEventTarget
:param request: The request that generated the event.
:type request: ~event_grid_publisher_client.models.ContainerRegistryEventRequest
:param actor: The agent that initiated the event. For most situations, this could be from the
authorization context of the request.
:type actor: ~event_grid_publisher_client.models.ContainerRegistryEventActor
:param source: The registry node that generated the event. Put differently, while the actor
initiates the event, the source generates it.
:type source: ~event_grid_publisher_client.models.ContainerRegistryEventSource
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'action': {'key': 'action', 'type': 'str'},
'target': {'key': 'target', 'type': 'ContainerRegistryEventTarget'},
'request': {'key': 'request', 'type': 'ContainerRegistryEventRequest'},
'actor': {'key': 'actor', 'type': 'ContainerRegistryEventActor'},
'source': {'key': 'source', 'type': 'ContainerRegistryEventSource'},
}
def __init__(
self,
**kwargs
):
super(ContainerRegistryImageDeletedEventData, self).__init__(**kwargs)
class ContainerRegistryImagePushedEventData(ContainerRegistryEventData):
"""Schema of the Data property of an EventGridEvent for a Microsoft.ContainerRegistry.ImagePushed event.
:param id: The event ID.
:type id: str
:param timestamp: The time at which the event occurred.
:type timestamp: ~datetime.datetime
:param action: The action that encompasses the provided event.
:type action: str
:param target: The target of the event.
:type target: ~event_grid_publisher_client.models.ContainerRegistryEventTarget
:param request: The request that generated the event.
:type request: ~event_grid_publisher_client.models.ContainerRegistryEventRequest
:param actor: The agent that initiated the event. For most situations, this could be from the
authorization context of the request.
:type actor: ~event_grid_publisher_client.models.ContainerRegistryEventActor
:param source: The registry node that generated the event. Put differently, while the actor
initiates the event, the source generates it.
:type source: ~event_grid_publisher_client.models.ContainerRegistryEventSource
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'action': {'key': 'action', 'type': 'str'},
'target': {'key': 'target', 'type': 'ContainerRegistryEventTarget'},
'request': {'key': 'request', 'type': 'ContainerRegistryEventRequest'},
'actor': {'key': 'actor', 'type': 'ContainerRegistryEventActor'},
'source': {'key': 'source', 'type': 'ContainerRegistryEventSource'},
}
def __init__(
self,
**kwargs
):
super(ContainerRegistryImagePushedEventData, self).__init__(**kwargs)
class DeviceConnectionStateEventInfo(msrest.serialization.Model):
"""Information about the device connection state event.
:param sequence_number: The sequence number, given as the string representation of a
hexadecimal number. A plain string comparison identifies the larger sequence number,
since digits sort before letters in both ASCII and hexadecimal order. Parsed from its
string form, the value is a 256-bit number.
:type sequence_number: str
"""
_attribute_map = {
'sequence_number': {'key': 'sequenceNumber', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(DeviceConnectionStateEventInfo, self).__init__(**kwargs)
self.sequence_number = kwargs.get('sequence_number', None)
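# Illustrative sketch of the comparison described above: the sequence numbers
# are hexadecimal strings, so once both operands are zero-padded to a common
# width (and assuming consistent letter case), a plain string comparison
# orders them numerically.
def _is_newer_connection_event(sequence_a, sequence_b):
    width = max(len(sequence_a), len(sequence_b))
    return sequence_a.rjust(width, '0') > sequence_b.rjust(width, '0')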
class DeviceConnectionStateEventProperties(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a device connection state event (DeviceConnected, DeviceDisconnected).
:param device_id: The unique identifier of the device. This case-sensitive string can be up to
128 characters long, and supports ASCII 7-bit alphanumeric characters plus the following
special characters: - : . + % _ # * ? ! ( ) , = @ ; $ '.
:type device_id: str
:param module_id: The unique identifier of the module. This case-sensitive string can be up to
128 characters long, and supports ASCII 7-bit alphanumeric characters plus the following
special characters: - : . + % _ # * ? ! ( ) , = @ ; $ '.
:type module_id: str
:param hub_name: Name of the IoT Hub where the device was created or deleted.
:type hub_name: str
:param device_connection_state_event_info: Information about the device connection state event.
:type device_connection_state_event_info:
~event_grid_publisher_client.models.DeviceConnectionStateEventInfo
"""
_attribute_map = {
'device_id': {'key': 'deviceId', 'type': 'str'},
'module_id': {'key': 'moduleId', 'type': 'str'},
'hub_name': {'key': 'hubName', 'type': 'str'},
'device_connection_state_event_info': {'key': 'deviceConnectionStateEventInfo', 'type': 'DeviceConnectionStateEventInfo'},
}
def __init__(
self,
**kwargs
):
super(DeviceConnectionStateEventProperties, self).__init__(**kwargs)
self.device_id = kwargs.get('device_id', None)
self.module_id = kwargs.get('module_id', None)
self.hub_name = kwargs.get('hub_name', None)
self.device_connection_state_event_info = kwargs.get('device_connection_state_event_info', None)
class DeviceLifeCycleEventProperties(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a device life cycle event (DeviceCreated, DeviceDeleted).
:param device_id: The unique identifier of the device. This case-sensitive string can be up to
128 characters long, and supports ASCII 7-bit alphanumeric characters plus the following
special characters: - : . + % _ # * ? ! ( ) , = @ ; $ '.
:type device_id: str
:param hub_name: Name of the IoT Hub where the device was created or deleted.
:type hub_name: str
:param twin: Information about the device twin, which is the cloud representation of
application device metadata.
:type twin: ~event_grid_publisher_client.models.DeviceTwinInfo
"""
_attribute_map = {
'device_id': {'key': 'deviceId', 'type': 'str'},
'hub_name': {'key': 'hubName', 'type': 'str'},
'twin': {'key': 'twin', 'type': 'DeviceTwinInfo'},
}
def __init__(
self,
**kwargs
):
super(DeviceLifeCycleEventProperties, self).__init__(**kwargs)
self.device_id = kwargs.get('device_id', None)
self.hub_name = kwargs.get('hub_name', None)
self.twin = kwargs.get('twin', None)
class DeviceTelemetryEventProperties(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a device telemetry event (DeviceTelemetry).
:param body: The content of the message from the device.
:type body: object
:param properties: Application properties are user-defined strings that can be added to the
message. These fields are optional.
:type properties: dict[str, str]
:param system_properties: System properties help identify contents and source of the messages.
:type system_properties: dict[str, str]
"""
_attribute_map = {
'body': {'key': 'body', 'type': 'object'},
'properties': {'key': 'properties', 'type': '{str}'},
'system_properties': {'key': 'systemProperties', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(DeviceTelemetryEventProperties, self).__init__(**kwargs)
self.body = kwargs.get('body', None)
self.properties = kwargs.get('properties', None)
self.system_properties = kwargs.get('system_properties', None)
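# Illustrative sketch: reading a telemetry event. body is the message payload
# as deserialized; it is assumed here (not guaranteed by this module) to be a
# dict when the device published JSON with an appropriate content type, while
# properties and system_properties are plain string dictionaries.
def _telemetry_reading(telemetry_event, key):
    body = telemetry_event.body
    return body.get(key) if isinstance(body, dict) else None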
class DeviceTwinInfo(msrest.serialization.Model):
"""Information about the device twin, which is the cloud representation of application device metadata.
:param authentication_type: Authentication type used for this device: either SAS, SelfSigned,
or CertificateAuthority.
:type authentication_type: str
:param cloud_to_device_message_count: Count of cloud to device messages sent to this device.
:type cloud_to_device_message_count: float
:param connection_state: Whether the device is connected or disconnected.
:type connection_state: str
:param device_id: The unique identifier of the device twin.
:type device_id: str
:param etag: A piece of information that describes the content of the device twin. Each etag is
guaranteed to be unique per device twin.
:type etag: str
:param last_activity_time: The ISO8601 timestamp of the last activity.
:type last_activity_time: str
:param properties: Properties JSON element.
:type properties: ~event_grid_publisher_client.models.DeviceTwinInfoProperties
:param status: Whether the device twin is enabled or disabled.
:type status: str
:param status_update_time: The ISO8601 timestamp of the last device twin status update.
:type status_update_time: str
:param version: An integer that is incremented by one each time the device twin is updated.
:type version: float
:param x509_thumbprint: The thumbprint is a unique value for the x509 certificate, commonly
used to find a particular certificate in a certificate store. The thumbprint is dynamically
generated using the SHA1 algorithm, and does not physically exist in the certificate.
:type x509_thumbprint: ~event_grid_publisher_client.models.DeviceTwinInfoX509Thumbprint
"""
_attribute_map = {
'authentication_type': {'key': 'authenticationType', 'type': 'str'},
'cloud_to_device_message_count': {'key': 'cloudToDeviceMessageCount', 'type': 'float'},
'connection_state': {'key': 'connectionState', 'type': 'str'},
'device_id': {'key': 'deviceId', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'last_activity_time': {'key': 'lastActivityTime', 'type': 'str'},
'properties': {'key': 'properties', 'type': 'DeviceTwinInfoProperties'},
'status': {'key': 'status', 'type': 'str'},
'status_update_time': {'key': 'statusUpdateTime', 'type': 'str'},
'version': {'key': 'version', 'type': 'float'},
'x509_thumbprint': {'key': 'x509Thumbprint', 'type': 'DeviceTwinInfoX509Thumbprint'},
}
def __init__(
self,
**kwargs
):
super(DeviceTwinInfo, self).__init__(**kwargs)
self.authentication_type = kwargs.get('authentication_type', None)
self.cloud_to_device_message_count = kwargs.get('cloud_to_device_message_count', None)
self.connection_state = kwargs.get('connection_state', None)
self.device_id = kwargs.get('device_id', None)
self.etag = kwargs.get('etag', None)
self.last_activity_time = kwargs.get('last_activity_time', None)
self.properties = kwargs.get('properties', None)
self.status = kwargs.get('status', None)
self.status_update_time = kwargs.get('status_update_time', None)
self.version = kwargs.get('version', None)
self.x509_thumbprint = kwargs.get('x509_thumbprint', None)
class DeviceTwinInfoProperties(msrest.serialization.Model):
"""Properties JSON element.
:param desired: A portion of the properties that can be written only by the application
back-end, and read by the device.
:type desired: ~event_grid_publisher_client.models.DeviceTwinProperties
:param reported: A portion of the properties that can be written only by the device, and read
by the application back-end.
:type reported: ~event_grid_publisher_client.models.DeviceTwinProperties
"""
_attribute_map = {
'desired': {'key': 'desired', 'type': 'DeviceTwinProperties'},
'reported': {'key': 'reported', 'type': 'DeviceTwinProperties'},
}
def __init__(
self,
**kwargs
):
super(DeviceTwinInfoProperties, self).__init__(**kwargs)
self.desired = kwargs.get('desired', None)
self.reported = kwargs.get('reported', None)
class DeviceTwinInfoX509Thumbprint(msrest.serialization.Model):
"""The thumbprint is a unique value for the x509 certificate, commonly used to find a particular certificate in a certificate store. The thumbprint is dynamically generated using the SHA1 algorithm, and does not physically exist in the certificate.
:param primary_thumbprint: Primary thumbprint for the x509 certificate.
:type primary_thumbprint: str
:param secondary_thumbprint: Secondary thumbprint for the x509 certificate.
:type secondary_thumbprint: str
"""
_attribute_map = {
'primary_thumbprint': {'key': 'primaryThumbprint', 'type': 'str'},
'secondary_thumbprint': {'key': 'secondaryThumbprint', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(DeviceTwinInfoX509Thumbprint, self).__init__(**kwargs)
self.primary_thumbprint = kwargs.get('primary_thumbprint', None)
self.secondary_thumbprint = kwargs.get('secondary_thumbprint', None)
class DeviceTwinMetadata(msrest.serialization.Model):
"""Metadata information for the properties JSON document.
:param last_updated: The ISO8601 timestamp of the last time the properties were updated.
:type last_updated: str
"""
_attribute_map = {
'last_updated': {'key': 'lastUpdated', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(DeviceTwinMetadata, self).__init__(**kwargs)
self.last_updated = kwargs.get('last_updated', None)
class DeviceTwinProperties(msrest.serialization.Model):
"""A portion of the properties that can be written only by the application back-end, and read by the device.
:param metadata: Metadata information for the properties JSON document.
:type metadata: ~event_grid_publisher_client.models.DeviceTwinMetadata
:param version: Version of device twin properties.
:type version: float
"""
_attribute_map = {
'metadata': {'key': 'metadata', 'type': 'DeviceTwinMetadata'},
'version': {'key': 'version', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
super(DeviceTwinProperties, self).__init__(**kwargs)
self.metadata = kwargs.get('metadata', None)
self.version = kwargs.get('version', None)
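# Illustrative sketch of how the twin models nest: DeviceTwinInfo carries a
# DeviceTwinInfoProperties, whose desired and reported halves are
# DeviceTwinProperties instances holding DeviceTwinMetadata. Values invented.
def _example_device_twin():
    properties = DeviceTwinInfoProperties(
        desired=DeviceTwinProperties(
            metadata=DeviceTwinMetadata(last_updated='2021-01-01T00:00:00Z'),
            version=2.0,
        ),
        reported=DeviceTwinProperties(
            metadata=DeviceTwinMetadata(last_updated='2021-01-01T00:05:00Z'),
            version=3.0,
        ),
    )
    return DeviceTwinInfo(device_id='device-1', status='enabled', properties=properties)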
class EventGridEvent(msrest.serialization.Model):
"""Properties of an event published to an Event Grid topic using the EventGrid Schema.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param id: Required. A unique identifier for the event.
:type id: str
:param topic: The resource path of the event source.
:type topic: str
:param subject: Required. A resource path relative to the topic path.
:type subject: str
:param data: Required. Event data specific to the event type.
:type data: object
:param event_type: Required. The type of the event that occurred.
:type event_type: str
:param event_time: Required. The time (in UTC) the event was generated.
:type event_time: ~datetime.datetime
:ivar metadata_version: The schema version of the event metadata.
:vartype metadata_version: str
:param data_version: Required. The schema version of the data object.
:type data_version: str
"""
_validation = {
'id': {'required': True},
'subject': {'required': True},
'data': {'required': True},
'event_type': {'required': True},
'event_time': {'required': True},
'metadata_version': {'readonly': True},
'data_version': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'topic': {'key': 'topic', 'type': 'str'},
'subject': {'key': 'subject', 'type': 'str'},
'data': {'key': 'data', 'type': 'object'},
'event_type': {'key': 'eventType', 'type': 'str'},
'event_time': {'key': 'eventTime', 'type': 'iso-8601'},
'metadata_version': {'key': 'metadataVersion', 'type': 'str'},
'data_version': {'key': 'dataVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(EventGridEvent, self).__init__(**kwargs)
self.id = kwargs['id']
self.topic = kwargs.get('topic', None)
self.subject = kwargs['subject']
self.data = kwargs['data']
self.event_type = kwargs['event_type']
self.event_time = kwargs['event_time']
self.metadata_version = None
self.data_version = kwargs['data_version']
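# Illustrative sketch: constructing an EventGridEvent. id, subject, data,
# event_type, event_time and data_version are mandatory (the constructor
# indexes kwargs directly), while metadata_version is read-only and always
# initialized to None on the client. Values are invented.
def _example_event_grid_event():
    import datetime
    return EventGridEvent(
        id='event-1',
        subject='/contoso/items/item-1',
        data={'itemSku': 'sku-1'},
        event_type='Contoso.Items.ItemReceived',
        event_time=datetime.datetime(2021, 3, 1, 12, 0, 0),
        data_version='1.0',
    )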
class EventHubCaptureFileCreatedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.EventHub.CaptureFileCreated event.
:param fileurl: The path to the capture file.
:type fileurl: str
:param file_type: The file type of the capture file.
:type file_type: str
:param partition_id: The Event Hub partition ID.
:type partition_id: str
:param size_in_bytes: The file size in bytes.
:type size_in_bytes: int
:param event_count: The number of events in the file.
:type event_count: int
:param first_sequence_number: The smallest sequence number captured in the file.
:type first_sequence_number: int
:param last_sequence_number: The last sequence number captured in the file.
:type last_sequence_number: int
:param first_enqueue_time: The enqueue time of the first event in the file.
:type first_enqueue_time: ~datetime.datetime
:param last_enqueue_time: The enqueue time of the last event in the file.
:type last_enqueue_time: ~datetime.datetime
"""
_attribute_map = {
'fileurl': {'key': 'fileurl', 'type': 'str'},
'file_type': {'key': 'fileType', 'type': 'str'},
'partition_id': {'key': 'partitionId', 'type': 'str'},
'size_in_bytes': {'key': 'sizeInBytes', 'type': 'int'},
'event_count': {'key': 'eventCount', 'type': 'int'},
'first_sequence_number': {'key': 'firstSequenceNumber', 'type': 'int'},
'last_sequence_number': {'key': 'lastSequenceNumber', 'type': 'int'},
'first_enqueue_time': {'key': 'firstEnqueueTime', 'type': 'iso-8601'},
'last_enqueue_time': {'key': 'lastEnqueueTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
super(EventHubCaptureFileCreatedEventData, self).__init__(**kwargs)
self.fileurl = kwargs.get('fileurl', None)
self.file_type = kwargs.get('file_type', None)
self.partition_id = kwargs.get('partition_id', None)
self.size_in_bytes = kwargs.get('size_in_bytes', None)
self.event_count = kwargs.get('event_count', None)
self.first_sequence_number = kwargs.get('first_sequence_number', None)
self.last_sequence_number = kwargs.get('last_sequence_number', None)
self.first_enqueue_time = kwargs.get('first_enqueue_time', None)
self.last_enqueue_time = kwargs.get('last_enqueue_time', None)
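# Illustrative sketch: deriving the capture window from a deserialized
# EventHubCaptureFileCreatedEventData. Both enqueue times are datetimes per
# the attribute map, so their difference is a timedelta.
def _capture_window_seconds(capture_event):
    if capture_event.first_enqueue_time is None or capture_event.last_enqueue_time is None:
        return None
    delta = capture_event.last_enqueue_time - capture_event.first_enqueue_time
    return delta.total_seconds()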
class IotHubDeviceConnectedEventData(DeviceConnectionStateEventProperties):
"""Event data for Microsoft.Devices.DeviceConnected event.
:param device_id: The unique identifier of the device. This case-sensitive string can be up to
128 characters long, and supports ASCII 7-bit alphanumeric characters plus the following
special characters: - : . + % _ # * ? ! ( ) , = @ ; $ '.
:type device_id: str
:param module_id: The unique identifier of the module. This case-sensitive string can be up to
128 characters long, and supports ASCII 7-bit alphanumeric characters plus the following
special characters: - : . + % _ # * ? ! ( ) , = @ ; $ '.
:type module_id: str
:param hub_name: Name of the IoT Hub where the device was created or deleted.
:type hub_name: str
:param device_connection_state_event_info: Information about the device connection state event.
:type device_connection_state_event_info:
~event_grid_publisher_client.models.DeviceConnectionStateEventInfo
"""
_attribute_map = {
'device_id': {'key': 'deviceId', 'type': 'str'},
'module_id': {'key': 'moduleId', 'type': 'str'},
'hub_name': {'key': 'hubName', 'type': 'str'},
'device_connection_state_event_info': {'key': 'deviceConnectionStateEventInfo', 'type': 'DeviceConnectionStateEventInfo'},
}
def __init__(
self,
**kwargs
):
super(IotHubDeviceConnectedEventData, self).__init__(**kwargs)
class IotHubDeviceCreatedEventData(DeviceLifeCycleEventProperties):
"""Event data for Microsoft.Devices.DeviceCreated event.
:param device_id: The unique identifier of the device. This case-sensitive string can be up to
128 characters long, and supports ASCII 7-bit alphanumeric characters plus the following
special characters: - : . + % _ # * ? ! ( ) , = @ ; $ '.
:type device_id: str
:param hub_name: Name of the IoT Hub where the device was created or deleted.
:type hub_name: str
:param twin: Information about the device twin, which is the cloud representation of
application device metadata.
:type twin: ~event_grid_publisher_client.models.DeviceTwinInfo
"""
_attribute_map = {
'device_id': {'key': 'deviceId', 'type': 'str'},
'hub_name': {'key': 'hubName', 'type': 'str'},
'twin': {'key': 'twin', 'type': 'DeviceTwinInfo'},
}
def __init__(
self,
**kwargs
):
super(IotHubDeviceCreatedEventData, self).__init__(**kwargs)
class IotHubDeviceDeletedEventData(DeviceLifeCycleEventProperties):
"""Event data for Microsoft.Devices.DeviceDeleted event.
:param device_id: The unique identifier of the device. This case-sensitive string can be up to
128 characters long, and supports ASCII 7-bit alphanumeric characters plus the following
special characters: - : . + % _ # * ? ! ( ) , = @ ; $ '.
:type device_id: str
:param hub_name: Name of the IoT Hub where the device was created or deleted.
:type hub_name: str
:param twin: Information about the device twin, which is the cloud representation of
application device metadata.
:type twin: ~event_grid_publisher_client.models.DeviceTwinInfo
"""
_attribute_map = {
'device_id': {'key': 'deviceId', 'type': 'str'},
'hub_name': {'key': 'hubName', 'type': 'str'},
'twin': {'key': 'twin', 'type': 'DeviceTwinInfo'},
}
def __init__(
self,
**kwargs
):
super(IotHubDeviceDeletedEventData, self).__init__(**kwargs)
class IotHubDeviceDisconnectedEventData(DeviceConnectionStateEventProperties):
"""Event data for Microsoft.Devices.DeviceDisconnected event.
:param device_id: The unique identifier of the device. This case-sensitive string can be up to
128 characters long, and supports ASCII 7-bit alphanumeric characters plus the following
special characters: - : . + % _ # * ? ! ( ) , = @ ; $ '.
:type device_id: str
:param module_id: The unique identifier of the module. This case-sensitive string can be up to
128 characters long, and supports ASCII 7-bit alphanumeric characters plus the following
special characters: - : . + % _ # * ? ! ( ) , = @ ; $ '.
:type module_id: str
:param hub_name: Name of the IoT Hub where the device was created or deleted.
:type hub_name: str
:param device_connection_state_event_info: Information about the device connection state event.
:type device_connection_state_event_info:
~event_grid_publisher_client.models.DeviceConnectionStateEventInfo
"""
_attribute_map = {
'device_id': {'key': 'deviceId', 'type': 'str'},
'module_id': {'key': 'moduleId', 'type': 'str'},
'hub_name': {'key': 'hubName', 'type': 'str'},
'device_connection_state_event_info': {'key': 'deviceConnectionStateEventInfo', 'type': 'DeviceConnectionStateEventInfo'},
}
def __init__(
self,
**kwargs
):
super(IotHubDeviceDisconnectedEventData, self).__init__(**kwargs)
class IotHubDeviceTelemetryEventData(DeviceTelemetryEventProperties):
"""Event data for Microsoft.Devices.DeviceTelemetry event.
:param body: The content of the message from the device.
:type body: object
:param properties: Application properties are user-defined strings that can be added to the
message. These fields are optional.
:type properties: dict[str, str]
:param system_properties: System properties help identify contents and source of the messages.
:type system_properties: dict[str, str]
"""
_attribute_map = {
'body': {'key': 'body', 'type': 'object'},
'properties': {'key': 'properties', 'type': '{str}'},
'system_properties': {'key': 'systemProperties', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(IotHubDeviceTelemetryEventData, self).__init__(**kwargs)
class KeyVaultAccessPolicyChangedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.KeyVault.VaultAccessPolicyChanged event.
:param id: The id of the object that triggered this event.
:type id: str
:param vault_name: Key vault name of the object that triggered this event.
:type vault_name: str
:param object_type: The type of the object that triggered this event.
:type object_type: str
:param object_name: The name of the object that triggered this event.
:type object_name: str
:param version: The version of the object that triggered this event.
:type version: str
:param nbf: Not before date of the object that triggered this event.
:type nbf: float
:param exp: The expiration date of the object that triggered this event.
:type exp: float
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'vault_name': {'key': 'vaultName', 'type': 'str'},
'object_type': {'key': 'objectType', 'type': 'str'},
'object_name': {'key': 'objectName', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'nbf': {'key': 'nbf', 'type': 'float'},
'exp': {'key': 'exp', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
super(KeyVaultAccessPolicyChangedEventData, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.vault_name = kwargs.get('vault_name', None)
self.object_type = kwargs.get('object_type', None)
self.object_name = kwargs.get('object_name', None)
self.version = kwargs.get('version', None)
self.nbf = kwargs.get('nbf', None)
self.exp = kwargs.get('exp', None)
class KeyVaultCertificateExpiredEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.KeyVault.CertificateExpired event.
:param id: The id of the object that triggered this event.
:type id: str
:param vault_name: Key vault name of the object that triggered this event.
:type vault_name: str
:param object_type: The type of the object that triggered this event.
:type object_type: str
:param object_name: The name of the object that triggered this event.
:type object_name: str
:param version: The version of the object that triggered this event.
:type version: str
:param nbf: Not before date of the object that triggered this event.
:type nbf: float
:param exp: The expiration date of the object that triggered this event.
:type exp: float
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'vault_name': {'key': 'vaultName', 'type': 'str'},
'object_type': {'key': 'objectType', 'type': 'str'},
'object_name': {'key': 'objectName', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'nbf': {'key': 'nbf', 'type': 'float'},
'exp': {'key': 'exp', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
super(KeyVaultCertificateExpiredEventData, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.vault_name = kwargs.get('vault_name', None)
self.object_type = kwargs.get('object_type', None)
self.object_name = kwargs.get('object_name', None)
self.version = kwargs.get('version', None)
self.nbf = kwargs.get('nbf', None)
self.exp = kwargs.get('exp', None)
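# Illustrative sketch: nbf and exp on the Key Vault event models are floats.
# Interpreting them as Unix epoch seconds (the usual JWT-style convention; an
# assumption, not something this module enforces), they convert to aware
# datetimes as follows. The same helper applies to every KeyVault*EventData
# class in this module, since they all share the nbf/exp shape.
def _key_vault_validity_window(event_data):
    import datetime
    def _to_datetime(epoch_seconds):
        if epoch_seconds is None:
            return None
        return datetime.datetime.fromtimestamp(epoch_seconds, tz=datetime.timezone.utc)
    return _to_datetime(event_data.nbf), _to_datetime(event_data.exp)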
class KeyVaultCertificateNearExpiryEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.KeyVault.CertificateNearExpiry event.
:param id: The id of the object that triggered this event.
:type id: str
:param vault_name: Key vault name of the object that triggered this event.
:type vault_name: str
:param object_type: The type of the object that triggered this event.
:type object_type: str
:param object_name: The name of the object that triggered this event.
:type object_name: str
:param version: The version of the object that triggered this event.
:type version: str
:param nbf: Not before date of the object that triggered this event.
:type nbf: float
:param exp: The expiration date of the object that triggered this event.
:type exp: float
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'vault_name': {'key': 'vaultName', 'type': 'str'},
'object_type': {'key': 'objectType', 'type': 'str'},
'object_name': {'key': 'objectName', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'nbf': {'key': 'nbf', 'type': 'float'},
'exp': {'key': 'exp', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
super(KeyVaultCertificateNearExpiryEventData, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.vault_name = kwargs.get('vault_name', None)
self.object_type = kwargs.get('object_type', None)
self.object_name = kwargs.get('object_name', None)
self.version = kwargs.get('version', None)
self.nbf = kwargs.get('nbf', None)
self.exp = kwargs.get('exp', None)
class KeyVaultCertificateNewVersionCreatedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.KeyVault.CertificateNewVersionCreated event.
:param id: The id of the object that triggered this event.
:type id: str
:param vault_name: Key vault name of the object that triggered this event.
:type vault_name: str
:param object_type: The type of the object that triggered this event.
:type object_type: str
:param object_name: The name of the object that triggered this event.
:type object_name: str
:param version: The version of the object that triggered this event.
:type version: str
:param nbf: Not before date of the object that triggered this event.
:type nbf: float
:param exp: The expiration date of the object that triggered this event.
:type exp: float
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'vault_name': {'key': 'vaultName', 'type': 'str'},
'object_type': {'key': 'objectType', 'type': 'str'},
'object_name': {'key': 'objectName', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'nbf': {'key': 'nbf', 'type': 'float'},
'exp': {'key': 'exp', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
super(KeyVaultCertificateNewVersionCreatedEventData, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.vault_name = kwargs.get('vault_name', None)
self.object_type = kwargs.get('object_type', None)
self.object_name = kwargs.get('object_name', None)
self.version = kwargs.get('version', None)
self.nbf = kwargs.get('nbf', None)
self.exp = kwargs.get('exp', None)
class KeyVaultKeyExpiredEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.KeyVault.KeyExpired event.
:param id: The id of the object that triggered this event.
:type id: str
:param vault_name: Key vault name of the object that triggered this event.
:type vault_name: str
:param object_type: The type of the object that triggered this event.
:type object_type: str
:param object_name: The name of the object that triggered this event.
:type object_name: str
:param version: The version of the object that triggered this event.
:type version: str
:param nbf: Not before date of the object that triggered this event.
:type nbf: float
:param exp: The expiration date of the object that triggered this event.
:type exp: float
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'vault_name': {'key': 'vaultName', 'type': 'str'},
'object_type': {'key': 'objectType', 'type': 'str'},
'object_name': {'key': 'objectName', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'nbf': {'key': 'nbf', 'type': 'float'},
'exp': {'key': 'exp', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
super(KeyVaultKeyExpiredEventData, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.vault_name = kwargs.get('vault_name', None)
self.object_type = kwargs.get('object_type', None)
self.object_name = kwargs.get('object_name', None)
self.version = kwargs.get('version', None)
self.nbf = kwargs.get('nbf', None)
self.exp = kwargs.get('exp', None)
class KeyVaultKeyNearExpiryEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.KeyVault.KeyNearExpiry event.
:param id: The id of the object that triggered this event.
:type id: str
:param vault_name: Key vault name of the object that triggered this event.
:type vault_name: str
:param object_type: The type of the object that triggered this event.
:type object_type: str
:param object_name: The name of the object that triggered this event.
:type object_name: str
:param version: The version of the object that triggered this event.
:type version: str
:param nbf: Not before date of the object that triggered this event.
:type nbf: float
:param exp: The expiration date of the object that triggered this event.
:type exp: float
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'vault_name': {'key': 'vaultName', 'type': 'str'},
'object_type': {'key': 'objectType', 'type': 'str'},
'object_name': {'key': 'objectName', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'nbf': {'key': 'nbf', 'type': 'float'},
'exp': {'key': 'exp', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
super(KeyVaultKeyNearExpiryEventData, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.vault_name = kwargs.get('vault_name', None)
self.object_type = kwargs.get('object_type', None)
self.object_name = kwargs.get('object_name', None)
self.version = kwargs.get('version', None)
self.nbf = kwargs.get('nbf', None)
self.exp = kwargs.get('exp', None)
class KeyVaultKeyNewVersionCreatedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.KeyVault.KeyNewVersionCreated event.
:param id: The id of the object that triggered this event.
:type id: str
:param vault_name: Key vault name of the object that triggered this event.
:type vault_name: str
:param object_type: The type of the object that triggered this event.
:type object_type: str
:param object_name: The name of the object that triggered this event.
:type object_name: str
:param version: The version of the object that triggered this event.
:type version: str
:param nbf: Not before date of the object that triggered this event.
:type nbf: float
:param exp: The expiration date of the object that triggered this event.
:type exp: float
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'vault_name': {'key': 'vaultName', 'type': 'str'},
'object_type': {'key': 'objectType', 'type': 'str'},
'object_name': {'key': 'objectName', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'nbf': {'key': 'nbf', 'type': 'float'},
'exp': {'key': 'exp', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
super(KeyVaultKeyNewVersionCreatedEventData, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.vault_name = kwargs.get('vault_name', None)
self.object_type = kwargs.get('object_type', None)
self.object_name = kwargs.get('object_name', None)
self.version = kwargs.get('version', None)
self.nbf = kwargs.get('nbf', None)
self.exp = kwargs.get('exp', None)
class KeyVaultSecretExpiredEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.KeyVault.SecretExpired event.
:param id: The id of the object that triggered this event.
:type id: str
:param vault_name: Key vault name of the object that triggered this event.
:type vault_name: str
:param object_type: The type of the object that triggered this event.
:type object_type: str
:param object_name: The name of the object that triggered this event.
:type object_name: str
:param version: The version of the object that triggered this event.
:type version: str
:param nbf: Not before date of the object that triggered this event.
:type nbf: float
:param exp: The expiration date of the object that triggered this event.
:type exp: float
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'vault_name': {'key': 'vaultName', 'type': 'str'},
'object_type': {'key': 'objectType', 'type': 'str'},
'object_name': {'key': 'objectName', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'nbf': {'key': 'nbf', 'type': 'float'},
'exp': {'key': 'exp', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
super(KeyVaultSecretExpiredEventData, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.vault_name = kwargs.get('vault_name', None)
self.object_type = kwargs.get('object_type', None)
self.object_name = kwargs.get('object_name', None)
self.version = kwargs.get('version', None)
self.nbf = kwargs.get('nbf', None)
self.exp = kwargs.get('exp', None)
class KeyVaultSecretNearExpiryEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.KeyVault.SecretNearExpiry event.
:param id: The id of the object that triggered this event.
:type id: str
:param vault_name: Key vault name of the object that triggered this event.
:type vault_name: str
:param object_type: The type of the object that triggered this event.
:type object_type: str
:param object_name: The name of the object that triggered this event.
:type object_name: str
:param version: The version of the object that triggered this event.
:type version: str
:param nbf: Not before date of the object that triggered this event.
:type nbf: float
:param exp: The expiration date of the object that triggered this event.
:type exp: float
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'vault_name': {'key': 'vaultName', 'type': 'str'},
'object_type': {'key': 'objectType', 'type': 'str'},
'object_name': {'key': 'objectName', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'nbf': {'key': 'nbf', 'type': 'float'},
'exp': {'key': 'exp', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
super(KeyVaultSecretNearExpiryEventData, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.vault_name = kwargs.get('vault_name', None)
self.object_type = kwargs.get('object_type', None)
self.object_name = kwargs.get('object_name', None)
self.version = kwargs.get('version', None)
self.nbf = kwargs.get('nbf', None)
self.exp = kwargs.get('exp', None)
class KeyVaultSecretNewVersionCreatedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.KeyVault.SecretNewVersionCreated event.
:param id: The id of the object that triggered this event.
:type id: str
:param vault_name: Key vault name of the object that triggered this event.
:type vault_name: str
:param object_type: The type of the object that triggered this event.
:type object_type: str
:param object_name: The name of the object that triggered this event.
:type object_name: str
:param version: The version of the object that triggered this event.
:type version: str
:param nbf: Not before date of the object that triggered this event.
:type nbf: float
:param exp: The expiration date of the object that triggered this event.
:type exp: float
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'vault_name': {'key': 'vaultName', 'type': 'str'},
'object_type': {'key': 'objectType', 'type': 'str'},
'object_name': {'key': 'objectName', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'nbf': {'key': 'nbf', 'type': 'float'},
'exp': {'key': 'exp', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
super(KeyVaultSecretNewVersionCreatedEventData, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.vault_name = kwargs.get('vault_name', None)
self.object_type = kwargs.get('object_type', None)
self.object_name = kwargs.get('object_name', None)
self.version = kwargs.get('version', None)
self.nbf = kwargs.get('nbf', None)
self.exp = kwargs.get('exp', None)
class MachineLearningServicesDatasetDriftDetectedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.MachineLearningServices.DatasetDriftDetected event.
:param data_drift_id: The ID of the data drift monitor that triggered the event.
:type data_drift_id: str
:param data_drift_name: The name of the data drift monitor that triggered the event.
:type data_drift_name: str
:param run_id: The ID of the Run that detected data drift.
:type run_id: str
:param base_dataset_id: The ID of the base Dataset used to detect drift.
:type base_dataset_id: str
:param target_dataset_id: The ID of the target Dataset used to detect drift.
:type target_dataset_id: str
:param drift_coefficient: The coefficient result that triggered the event.
:type drift_coefficient: float
:param start_time: The start time of the target dataset time series that resulted in drift
detection.
:type start_time: ~datetime.datetime
:param end_time: The end time of the target dataset time series that resulted in drift
detection.
:type end_time: ~datetime.datetime
"""
_attribute_map = {
'data_drift_id': {'key': 'dataDriftId', 'type': 'str'},
'data_drift_name': {'key': 'dataDriftName', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'base_dataset_id': {'key': 'baseDatasetId', 'type': 'str'},
'target_dataset_id': {'key': 'targetDatasetId', 'type': 'str'},
'drift_coefficient': {'key': 'driftCoefficient', 'type': 'float'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
super(MachineLearningServicesDatasetDriftDetectedEventData, self).__init__(**kwargs)
self.data_drift_id = kwargs.get('data_drift_id', None)
self.data_drift_name = kwargs.get('data_drift_name', None)
self.run_id = kwargs.get('run_id', None)
self.base_dataset_id = kwargs.get('base_dataset_id', None)
self.target_dataset_id = kwargs.get('target_dataset_id', None)
self.drift_coefficient = kwargs.get('drift_coefficient', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
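# Illustrative sketch: the two 'iso-8601' entries in the attribute map above mean msrest
# converts ``datetime.datetime`` values to ISO-8601 strings on serialization (and parses
# them back on deserialization). The monitor ID and coefficient are hypothetical.
#
#     import datetime
#     drift = MachineLearningServicesDatasetDriftDetectedEventData(
#         data_drift_id='monitor-001',
#         drift_coefficient=0.73,
#         start_time=datetime.datetime(2021, 1, 1),
#         end_time=datetime.datetime(2021, 1, 8),
#     )
#     drift.serialize()   # startTime/endTime rendered as ISO-8601 strings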
class MachineLearningServicesModelDeployedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.MachineLearningServices.ModelDeployed event.
:param service_name: The name of the deployed service.
:type service_name: str
:param service_compute_type: The compute type (e.g. ACI, AKS) of the deployed service.
:type service_compute_type: str
:param model_ids: A comma-separated list of model IDs. The IDs of the models deployed in the
service.
:type model_ids: str
:param service_tags: The tags of the deployed service.
:type service_tags: object
:param service_properties: The properties of the deployed service.
:type service_properties: object
"""
_attribute_map = {
'service_name': {'key': 'serviceName', 'type': 'str'},
'service_compute_type': {'key': 'serviceComputeType', 'type': 'str'},
'model_ids': {'key': 'modelIds', 'type': 'str'},
'service_tags': {'key': 'serviceTags', 'type': 'object'},
'service_properties': {'key': 'serviceProperties', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
super(MachineLearningServicesModelDeployedEventData, self).__init__(**kwargs)
self.service_name = kwargs.get('service_name', None)
self.service_compute_type = kwargs.get('service_compute_type', None)
self.model_ids = kwargs.get('model_ids', None)
self.service_tags = kwargs.get('service_tags', None)
self.service_properties = kwargs.get('service_properties', None)
class MachineLearningServicesModelRegisteredEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.MachineLearningServices.ModelRegistered event.
:param model_name: The name of the model that was registered.
:type model_name: str
:param model_version: The version of the model that was registered.
:type model_version: str
:param model_tags: The tags of the model that was registered.
:type model_tags: object
:param model_properties: The properties of the model that was registered.
:type model_properties: object
"""
_attribute_map = {
'model_name': {'key': 'modelName', 'type': 'str'},
'model_version': {'key': 'modelVersion', 'type': 'str'},
'model_tags': {'key': 'modelTags', 'type': 'object'},
'model_properties': {'key': 'modelProperties', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
super(MachineLearningServicesModelRegisteredEventData, self).__init__(**kwargs)
self.model_name = kwargs.get('model_name', None)
self.model_version = kwargs.get('model_version', None)
self.model_tags = kwargs.get('model_tags', None)
self.model_properties = kwargs.get('model_properties', None)
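# Illustrative sketch: fields typed 'object' (model_tags, model_properties) are passed
# through as-is, so any JSON-compatible dict works here. All values below are hypothetical.
#
#     registered = MachineLearningServicesModelRegisteredEventData(
#         model_name='churn-classifier',
#         model_version='3',
#         model_tags={'stage': 'prod'},
#         model_properties={'framework': 'sklearn'},
#     )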
class MachineLearningServicesRunCompletedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.MachineLearningServices.RunCompleted event.
:param experiment_id: The ID of the experiment that the run belongs to.
:type experiment_id: str
:param experiment_name: The name of the experiment that the run belongs to.
:type experiment_name: str
:param run_id: The ID of the Run that was completed.
:type run_id: str
:param run_type: The Run Type of the completed Run.
:type run_type: str
:param run_tags: The tags of the completed Run.
:type run_tags: object
:param run_properties: The properties of the completed Run.
:type run_properties: object
"""
_attribute_map = {
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'run_type': {'key': 'runType', 'type': 'str'},
'run_tags': {'key': 'runTags', 'type': 'object'},
'run_properties': {'key': 'runProperties', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
super(MachineLearningServicesRunCompletedEventData, self).__init__(**kwargs)
self.experiment_id = kwargs.get('experiment_id', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.run_id = kwargs.get('run_id', None)
self.run_type = kwargs.get('run_type', None)
self.run_tags = kwargs.get('run_tags', None)
self.run_properties = kwargs.get('run_properties', None)
class MachineLearningServicesRunStatusChangedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.MachineLearningServices.RunStatusChanged event.
:param experiment_id: The ID of the experiment that the Machine Learning Run belongs to.
:type experiment_id: str
:param experiment_name: The name of the experiment that the Machine Learning Run belongs to.
:type experiment_name: str
:param run_id: The ID of the Machine Learning Run.
:type run_id: str
:param run_type: The Run Type of the Machine Learning Run.
:type run_type: str
:param run_tags: The tags of the Machine Learning Run.
:type run_tags: object
:param run_properties: The properties of the Machine Learning Run.
:type run_properties: object
:param run_status: The status of the Machine Learning Run.
:type run_status: str
"""
_attribute_map = {
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'run_type': {'key': 'runType', 'type': 'str'},
'run_tags': {'key': 'runTags', 'type': 'object'},
'run_properties': {'key': 'runProperties', 'type': 'object'},
'run_status': {'key': 'runStatus', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MachineLearningServicesRunStatusChangedEventData, self).__init__(**kwargs)
self.experiment_id = kwargs.get('experiment_id', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.run_id = kwargs.get('run_id', None)
self.run_type = kwargs.get('run_type', None)
self.run_tags = kwargs.get('run_tags', None)
self.run_properties = kwargs.get('run_properties', None)
self.run_status = kwargs.get('run_status', None)
class MapsGeofenceEventProperties(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Geofence event (GeofenceEntered, GeofenceExited, GeofenceResult).
:param expired_geofence_geometry_id: Lists the geometry IDs of the geofences which have
expired relative to the user time in the request.
:type expired_geofence_geometry_id: list[str]
:param geometries: Lists the fence geometries that either fully contain the coordinate position
or have an overlap with the searchBuffer around the fence.
:type geometries: list[~event_grid_publisher_client.models.MapsGeofenceGeometry]
:param invalid_period_geofence_geometry_id: Lists the geometry IDs of the geofences which
are in an invalid period relative to the user time in the request.
:type invalid_period_geofence_geometry_id: list[str]
:param is_event_published: True if at least one event is published to the Azure Maps event
subscriber, false if no event is published to the Azure Maps event subscriber.
:type is_event_published: bool
"""
_attribute_map = {
'expired_geofence_geometry_id': {'key': 'expiredGeofenceGeometryId', 'type': '[str]'},
'geometries': {'key': 'geometries', 'type': '[MapsGeofenceGeometry]'},
'invalid_period_geofence_geometry_id': {'key': 'invalidPeriodGeofenceGeometryId', 'type': '[str]'},
'is_event_published': {'key': 'isEventPublished', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(MapsGeofenceEventProperties, self).__init__(**kwargs)
self.expired_geofence_geometry_id = kwargs.get('expired_geofence_geometry_id', None)
self.geometries = kwargs.get('geometries', None)
self.invalid_period_geofence_geometry_id = kwargs.get('invalid_period_geofence_geometry_id', None)
self.is_event_published = kwargs.get('is_event_published', None)
class MapsGeofenceEnteredEventData(MapsGeofenceEventProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Maps.GeofenceEntered event.
:param expired_geofence_geometry_id: Lists the geometry IDs of the geofences which have
expired relative to the user time in the request.
:type expired_geofence_geometry_id: list[str]
:param geometries: Lists the fence geometries that either fully contain the coordinate position
or have an overlap with the searchBuffer around the fence.
:type geometries: list[~event_grid_publisher_client.models.MapsGeofenceGeometry]
:param invalid_period_geofence_geometry_id: Lists the geometry IDs of the geofences which
are in an invalid period relative to the user time in the request.
:type invalid_period_geofence_geometry_id: list[str]
:param is_event_published: True if at least one event is published to the Azure Maps event
subscriber, false if no event is published to the Azure Maps event subscriber.
:type is_event_published: bool
"""
_attribute_map = {
'expired_geofence_geometry_id': {'key': 'expiredGeofenceGeometryId', 'type': '[str]'},
'geometries': {'key': 'geometries', 'type': '[MapsGeofenceGeometry]'},
'invalid_period_geofence_geometry_id': {'key': 'invalidPeriodGeofenceGeometryId', 'type': '[str]'},
'is_event_published': {'key': 'isEventPublished', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(MapsGeofenceEnteredEventData, self).__init__(**kwargs)
class MapsGeofenceExitedEventData(MapsGeofenceEventProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Maps.GeofenceExited event.
:param expired_geofence_geometry_id: Lists the geometry IDs of the geofences which have
expired relative to the user time in the request.
:type expired_geofence_geometry_id: list[str]
:param geometries: Lists the fence geometries that either fully contain the coordinate position
or have an overlap with the searchBuffer around the fence.
:type geometries: list[~event_grid_publisher_client.models.MapsGeofenceGeometry]
:param invalid_period_geofence_geometry_id: Lists the geometry IDs of the geofences which
are in an invalid period relative to the user time in the request.
:type invalid_period_geofence_geometry_id: list[str]
:param is_event_published: True if at least one event is published to the Azure Maps event
subscriber, false if no event is published to the Azure Maps event subscriber.
:type is_event_published: bool
"""
_attribute_map = {
'expired_geofence_geometry_id': {'key': 'expiredGeofenceGeometryId', 'type': '[str]'},
'geometries': {'key': 'geometries', 'type': '[MapsGeofenceGeometry]'},
'invalid_period_geofence_geometry_id': {'key': 'invalidPeriodGeofenceGeometryId', 'type': '[str]'},
'is_event_published': {'key': 'isEventPublished', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(MapsGeofenceExitedEventData, self).__init__(**kwargs)
class MapsGeofenceGeometry(msrest.serialization.Model):
"""The geofence geometry.
:param device_id: ID of the device.
:type device_id: str
:param distance: Distance from the coordinate to the closest border of the geofence. Positive
means the coordinate is outside of the geofence. If the coordinate is outside of the geofence,
but more than the value of searchBuffer away from the closest geofence border, then the value
is 999. Negative means the coordinate is inside of the geofence. If the coordinate is inside
the polygon, but more than the value of searchBuffer away from the closest geofencing
border, then the value is -999. A value of 999 means that there is great confidence the
coordinate is well outside the geofence. A value of -999 means that there is great confidence
the coordinate is well within the geofence.
:type distance: float
:param geometry_id: The unique ID for the geofence geometry.
:type geometry_id: str
:param nearest_lat: Latitude of the nearest point of the geometry.
:type nearest_lat: float
:param nearest_lon: Longitude of the nearest point of the geometry.
:type nearest_lon: float
:param ud_id: The unique ID returned from the user upload service when uploading a geofence. Will
not be included in the geofencing post API.
:type ud_id: str
"""
_attribute_map = {
'device_id': {'key': 'deviceId', 'type': 'str'},
'distance': {'key': 'distance', 'type': 'float'},
'geometry_id': {'key': 'geometryId', 'type': 'str'},
'nearest_lat': {'key': 'nearestLat', 'type': 'float'},
'nearest_lon': {'key': 'nearestLon', 'type': 'float'},
'ud_id': {'key': 'udId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MapsGeofenceGeometry, self).__init__(**kwargs)
self.device_id = kwargs.get('device_id', None)
self.distance = kwargs.get('distance', None)
self.geometry_id = kwargs.get('geometry_id', None)
self.nearest_lat = kwargs.get('nearest_lat', None)
self.nearest_lon = kwargs.get('nearest_lon', None)
self.ud_id = kwargs.get('ud_id', None)
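# Illustrative sketch of the nested-model pattern: the '[MapsGeofenceGeometry]' entry in
# the geofence event attribute maps means ``geometries`` holds a list of the model defined
# above. Coordinates and IDs are hypothetical.
#
#     fence = MapsGeofenceGeometry(
#         device_id='device-42',
#         distance=-12.5,          # negative => coordinate is inside the geofence
#         geometry_id='geo-1',
#         nearest_lat=47.64,
#         nearest_lon=-122.13,
#     )
#     entered = MapsGeofenceEnteredEventData(geometries=[fence], is_event_published=True)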
class MapsGeofenceResultEventData(MapsGeofenceEventProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Maps.GeofenceResult event.
:param expired_geofence_geometry_id: Lists the geometry IDs of the geofences which have
expired relative to the user time in the request.
:type expired_geofence_geometry_id: list[str]
:param geometries: Lists the fence geometries that either fully contain the coordinate position
or have an overlap with the searchBuffer around the fence.
:type geometries: list[~event_grid_publisher_client.models.MapsGeofenceGeometry]
:param invalid_period_geofence_geometry_id: Lists the geometry IDs of the geofences which
are in an invalid period relative to the user time in the request.
:type invalid_period_geofence_geometry_id: list[str]
:param is_event_published: True if at least one event is published to the Azure Maps event
subscriber, false if no event is published to the Azure Maps event subscriber.
:type is_event_published: bool
"""
_attribute_map = {
'expired_geofence_geometry_id': {'key': 'expiredGeofenceGeometryId', 'type': '[str]'},
'geometries': {'key': 'geometries', 'type': '[MapsGeofenceGeometry]'},
'invalid_period_geofence_geometry_id': {'key': 'invalidPeriodGeofenceGeometryId', 'type': '[str]'},
'is_event_published': {'key': 'isEventPublished', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(MapsGeofenceResultEventData, self).__init__(**kwargs)
class MediaJobStateChangeEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Media.JobStateChange event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
"Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
:vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
:ivar state: The new state of the Job. Possible values include: "Canceled", "Canceling",
"Error", "Finished", "Processing", "Queued", "Scheduled".
:vartype state: str or ~event_grid_publisher_client.models.MediaJobState
:param correlation_data: Gets the Job correlation data.
:type correlation_data: dict[str, str]
"""
_validation = {
'previous_state': {'readonly': True},
'state': {'readonly': True},
}
_attribute_map = {
'previous_state': {'key': 'previousState', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'correlation_data': {'key': 'correlationData', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(MediaJobStateChangeEventData, self).__init__(**kwargs)
self.previous_state = None
self.state = None
self.correlation_data = kwargs.get('correlation_data', None)
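# Illustrative sketch: attributes marked {'readonly': True} are initialized to None and
# are only populated when msrest deserializes a server payload, e.g. via the inherited
# ``from_dict``/``deserialize`` classmethods. The payload below is hypothetical.
#
#     payload = {'previousState': 'Processing', 'state': 'Finished',
#                'correlationData': {'jobId': '123'}}
#     change = MediaJobStateChangeEventData.from_dict(payload)
#     change.previous_state   # 'Processing', set by deserialization, not by __init__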
class MediaJobCanceledEventData(MediaJobStateChangeEventData):
"""Job canceled event data. Schema of the data property of an EventGridEvent for a Microsoft.Media.JobCanceled event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
"Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
:vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
:ivar state: The new state of the Job. Possible values include: "Canceled", "Canceling",
"Error", "Finished", "Processing", "Queued", "Scheduled".
:vartype state: str or ~event_grid_publisher_client.models.MediaJobState
:param correlation_data: Gets the Job correlation data.
:type correlation_data: dict[str, str]
:param outputs: Gets the Job outputs.
:type outputs: list[~event_grid_publisher_client.models.MediaJobOutput]
"""
_validation = {
'previous_state': {'readonly': True},
'state': {'readonly': True},
}
_attribute_map = {
'previous_state': {'key': 'previousState', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'correlation_data': {'key': 'correlationData', 'type': '{str}'},
'outputs': {'key': 'outputs', 'type': '[MediaJobOutput]'},
}
def __init__(
self,
**kwargs
):
super(MediaJobCanceledEventData, self).__init__(**kwargs)
self.outputs = kwargs.get('outputs', None)
class MediaJobCancelingEventData(MediaJobStateChangeEventData):
"""Job canceling event data. Schema of the data property of an EventGridEvent for a Microsoft.Media.JobCanceling event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
"Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
:vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
:ivar state: The new state of the Job. Possible values include: "Canceled", "Canceling",
"Error", "Finished", "Processing", "Queued", "Scheduled".
:vartype state: str or ~event_grid_publisher_client.models.MediaJobState
:param correlation_data: Gets the Job correlation data.
:type correlation_data: dict[str, str]
"""
_validation = {
'previous_state': {'readonly': True},
'state': {'readonly': True},
}
_attribute_map = {
'previous_state': {'key': 'previousState', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'correlation_data': {'key': 'correlationData', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(MediaJobCancelingEventData, self).__init__(**kwargs)
class MediaJobError(msrest.serialization.Model):
"""Details of JobOutput errors.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar code: Error code describing the error. Possible values include: "ServiceError",
"ServiceTransientError", "DownloadNotAccessible", "DownloadTransientError",
"UploadNotAccessible", "UploadTransientError", "ConfigurationUnsupported", "ContentMalformed",
"ContentUnsupported".
:vartype code: str or ~event_grid_publisher_client.models.MediaJobErrorCode
:ivar message: A human-readable language-dependent representation of the error.
:vartype message: str
:ivar category: Helps with categorization of errors. Possible values include: "Service",
"Download", "Upload", "Configuration", "Content".
:vartype category: str or ~event_grid_publisher_client.models.MediaJobErrorCategory
:ivar retry: Indicates that it may be possible to retry the Job. If retry is unsuccessful,
please contact Azure support via Azure Portal. Possible values include: "DoNotRetry",
"MayRetry".
:vartype retry: str or ~event_grid_publisher_client.models.MediaJobRetry
:ivar details: An array of details about specific errors that led to this reported error.
:vartype details: list[~event_grid_publisher_client.models.MediaJobErrorDetail]
"""
_validation = {
'code': {'readonly': True},
'message': {'readonly': True},
'category': {'readonly': True},
'retry': {'readonly': True},
'details': {'readonly': True},
}
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'category': {'key': 'category', 'type': 'str'},
'retry': {'key': 'retry', 'type': 'str'},
'details': {'key': 'details', 'type': '[MediaJobErrorDetail]'},
}
def __init__(
self,
**kwargs
):
super(MediaJobError, self).__init__(**kwargs)
self.code = None
self.message = None
self.category = None
self.retry = None
self.details = None
class MediaJobErrorDetail(msrest.serialization.Model):
"""Details of JobOutput errors.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar code: Code describing the error detail.
:vartype code: str
:ivar message: A human-readable representation of the error.
:vartype message: str
"""
_validation = {
'code': {'readonly': True},
'message': {'readonly': True},
}
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MediaJobErrorDetail, self).__init__(**kwargs)
self.code = None
self.message = None
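# Illustrative sketch: a deserialized MediaJobError carries nested MediaJobErrorDetail
# items in ``details``. Since every field here is readonly, instances come from server
# payloads; a handler might walk them like this (the helper name is hypothetical):
#
#     def log_job_error(error):   # error: MediaJobError
#         print(error.code, error.category, error.retry)
#         for detail in (error.details or []):
#             print(' ', detail.code, detail.message)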
class MediaJobErroredEventData(MediaJobStateChangeEventData):
"""Job error state event data. Schema of the data property of an EventGridEvent for a Microsoft.Media.JobErrored event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
"Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
:vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
:ivar state: The new state of the Job. Possible values include: "Canceled", "Canceling",
"Error", "Finished", "Processing", "Queued", "Scheduled".
:vartype state: str or ~event_grid_publisher_client.models.MediaJobState
:param correlation_data: Gets the Job correlation data.
:type correlation_data: dict[str, str]
:param outputs: Gets the Job outputs.
:type outputs: list[~event_grid_publisher_client.models.MediaJobOutput]
"""
_validation = {
'previous_state': {'readonly': True},
'state': {'readonly': True},
}
_attribute_map = {
'previous_state': {'key': 'previousState', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'correlation_data': {'key': 'correlationData', 'type': '{str}'},
'outputs': {'key': 'outputs', 'type': '[MediaJobOutput]'},
}
def __init__(
self,
**kwargs
):
super(MediaJobErroredEventData, self).__init__(**kwargs)
self.outputs = kwargs.get('outputs', None)
class MediaJobFinishedEventData(MediaJobStateChangeEventData):
"""Job finished event data. Schema of the data property of an EventGridEvent for a Microsoft.Media.JobFinished event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
"Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
:vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
:ivar state: The new state of the Job. Possible values include: "Canceled", "Canceling",
"Error", "Finished", "Processing", "Queued", "Scheduled".
:vartype state: str or ~event_grid_publisher_client.models.MediaJobState
:param correlation_data: Gets the Job correlation data.
:type correlation_data: dict[str, str]
:param outputs: Gets the Job outputs.
:type outputs: list[~event_grid_publisher_client.models.MediaJobOutput]
"""
_validation = {
'previous_state': {'readonly': True},
'state': {'readonly': True},
}
_attribute_map = {
'previous_state': {'key': 'previousState', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'correlation_data': {'key': 'correlationData', 'type': '{str}'},
'outputs': {'key': 'outputs', 'type': '[MediaJobOutput]'},
}
def __init__(
self,
**kwargs
):
super(MediaJobFinishedEventData, self).__init__(**kwargs)
self.outputs = kwargs.get('outputs', None)
class MediaJobOutput(msrest.serialization.Model):
"""The event data for a Job output.
You probably want to use the sub-classes and not this class directly. Known
sub-classes are: MediaJobOutputAsset.
All required parameters must be populated in order to send to Azure.
:param odata_type: The discriminator for derived types. Constant filled by server.
:type odata_type: str
:param error: Gets the Job output error.
:type error: ~event_grid_publisher_client.models.MediaJobError
:param label: Gets the Job output label.
:type label: str
:param progress: Required. Gets the Job output progress.
:type progress: long
:param state: Required. Gets the Job output state. Possible values include: "Canceled",
"Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
:type state: str or ~event_grid_publisher_client.models.MediaJobState
"""
_validation = {
'progress': {'required': True},
'state': {'required': True},
}
    _attribute_map = {
        'odata_type': {'key': '@odata\\.type', 'type': 'str'},
        'error': {'key': 'error', 'type': 'MediaJobError'},
        'label': {'key': 'label', 'type': 'str'},
'progress': {'key': 'progress', 'type': 'long'},
'state': {'key': 'state', 'type': 'str'},
}
_subtype_map = {
'odata_type': {'#Microsoft.Media.JobOutputAsset': 'MediaJobOutputAsset'}
}
def __init__(
self,
**kwargs
):
super(MediaJobOutput, self).__init__(**kwargs)
self.odata_type = None # type: Optional[str]
self.error = kwargs.get('error', None)
self.label = kwargs.get('label', None)
self.progress = kwargs['progress']
self.state = kwargs['state']
class MediaJobOutputAsset(MediaJobOutput):
"""The event data for a Job output asset.
All required parameters must be populated in order to send to Azure.
:param odata_type: The discriminator for derived types. Constant filled by server.
:type odata_type: str
:param error: Gets the Job output error.
:type error: ~event_grid_publisher_client.models.MediaJobError
:param label: Gets the Job output label.
:type label: str
:param progress: Required. Gets the Job output progress.
:type progress: long
:param state: Required. Gets the Job output state. Possible values include: "Canceled",
"Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
:type state: str or ~event_grid_publisher_client.models.MediaJobState
:param asset_name: Gets the Job output asset name.
:type asset_name: str
"""
_validation = {
'progress': {'required': True},
'state': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'error': {'key': 'error', 'type': 'MediaJobError'},
'label': {'key': 'label', 'type': 'str'},
'progress': {'key': 'progress', 'type': 'long'},
'state': {'key': 'state', 'type': 'str'},
'asset_name': {'key': 'assetName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MediaJobOutputAsset, self).__init__(**kwargs)
self.odata_type = '#Microsoft.Media.JobOutputAsset' # type: str
self.asset_name = kwargs.get('asset_name', None)
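# Illustrative sketch of the discriminator pattern: ``_subtype_map`` on MediaJobOutput
# keys the concrete class off the '@odata.type' value, so deserializing through the base
# class yields the subtype. The payload is hypothetical.
#
#     payload = {'@odata.type': '#Microsoft.Media.JobOutputAsset',
#                'progress': 100, 'state': 'Finished', 'assetName': 'output-asset'}
#     output = MediaJobOutput.deserialize(payload)
#     isinstance(output, MediaJobOutputAsset)   # True, chosen via the subtype map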
class MediaJobOutputStateChangeEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Media.JobOutputStateChange event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
"Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
:vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
:param output: Gets the output.
:type output: ~event_grid_publisher_client.models.MediaJobOutput
:param job_correlation_data: Gets the Job correlation data.
:type job_correlation_data: dict[str, str]
"""
_validation = {
'previous_state': {'readonly': True},
}
_attribute_map = {
'previous_state': {'key': 'previousState', 'type': 'str'},
'output': {'key': 'output', 'type': 'MediaJobOutput'},
'job_correlation_data': {'key': 'jobCorrelationData', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(MediaJobOutputStateChangeEventData, self).__init__(**kwargs)
self.previous_state = None
self.output = kwargs.get('output', None)
self.job_correlation_data = kwargs.get('job_correlation_data', None)
class MediaJobOutputCanceledEventData(MediaJobOutputStateChangeEventData):
"""Job output canceled event data. Schema of the data property of an EventGridEvent for a Microsoft.Media.JobOutputCanceled event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
"Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
:vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
:param output: Gets the output.
:type output: ~event_grid_publisher_client.models.MediaJobOutput
:param job_correlation_data: Gets the Job correlation data.
:type job_correlation_data: dict[str, str]
"""
_validation = {
'previous_state': {'readonly': True},
}
_attribute_map = {
'previous_state': {'key': 'previousState', 'type': 'str'},
'output': {'key': 'output', 'type': 'MediaJobOutput'},
'job_correlation_data': {'key': 'jobCorrelationData', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(MediaJobOutputCanceledEventData, self).__init__(**kwargs)
class MediaJobOutputCancelingEventData(MediaJobOutputStateChangeEventData):
"""Job output canceling event data. Schema of the data property of an EventGridEvent for a Microsoft.Media.JobOutputCanceling event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
"Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
:vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
:param output: Gets the output.
:type output: ~event_grid_publisher_client.models.MediaJobOutput
:param job_correlation_data: Gets the Job correlation data.
:type job_correlation_data: dict[str, str]
"""
_validation = {
'previous_state': {'readonly': True},
}
_attribute_map = {
'previous_state': {'key': 'previousState', 'type': 'str'},
'output': {'key': 'output', 'type': 'MediaJobOutput'},
'job_correlation_data': {'key': 'jobCorrelationData', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(MediaJobOutputCancelingEventData, self).__init__(**kwargs)
class MediaJobOutputErroredEventData(MediaJobOutputStateChangeEventData):
"""Job output error event data. Schema of the data property of an EventGridEvent for a Microsoft.Media.JobOutputErrored event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
"Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
:vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
:param output: Gets the output.
:type output: ~event_grid_publisher_client.models.MediaJobOutput
:param job_correlation_data: Gets the Job correlation data.
:type job_correlation_data: dict[str, str]
"""
_validation = {
'previous_state': {'readonly': True},
}
_attribute_map = {
'previous_state': {'key': 'previousState', 'type': 'str'},
'output': {'key': 'output', 'type': 'MediaJobOutput'},
'job_correlation_data': {'key': 'jobCorrelationData', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(MediaJobOutputErroredEventData, self).__init__(**kwargs)
class MediaJobOutputFinishedEventData(MediaJobOutputStateChangeEventData):
"""Job output finished event data. Schema of the data property of an EventGridEvent for a Microsoft.Media.JobOutputFinished event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
"Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
:vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
:param output: Gets the output.
:type output: ~event_grid_publisher_client.models.MediaJobOutput
:param job_correlation_data: Gets the Job correlation data.
:type job_correlation_data: dict[str, str]
"""
_validation = {
'previous_state': {'readonly': True},
}
_attribute_map = {
'previous_state': {'key': 'previousState', 'type': 'str'},
'output': {'key': 'output', 'type': 'MediaJobOutput'},
'job_correlation_data': {'key': 'jobCorrelationData', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(MediaJobOutputFinishedEventData, self).__init__(**kwargs)
class MediaJobOutputProcessingEventData(MediaJobOutputStateChangeEventData):
"""Job output processing event data. Schema of the data property of an EventGridEvent for a Microsoft.Media.JobOutputProcessing event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
"Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
:vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
:param output: Gets the output.
:type output: ~event_grid_publisher_client.models.MediaJobOutput
:param job_correlation_data: Gets the Job correlation data.
:type job_correlation_data: dict[str, str]
"""
_validation = {
'previous_state': {'readonly': True},
}
_attribute_map = {
'previous_state': {'key': 'previousState', 'type': 'str'},
'output': {'key': 'output', 'type': 'MediaJobOutput'},
'job_correlation_data': {'key': 'jobCorrelationData', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(MediaJobOutputProcessingEventData, self).__init__(**kwargs)
class MediaJobOutputProgressEventData(msrest.serialization.Model):
"""Job Output Progress Event Data. Schema of the Data property of an EventGridEvent for a Microsoft.Media.JobOutputProgress event.
:param label: Gets the Job output label.
:type label: str
:param progress: Gets the Job output progress.
:type progress: long
:param job_correlation_data: Gets the Job correlation data.
:type job_correlation_data: dict[str, str]
"""
_attribute_map = {
'label': {'key': 'label', 'type': 'str'},
'progress': {'key': 'progress', 'type': 'long'},
'job_correlation_data': {'key': 'jobCorrelationData', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(MediaJobOutputProgressEventData, self).__init__(**kwargs)
self.label = kwargs.get('label', None)
self.progress = kwargs.get('progress', None)
self.job_correlation_data = kwargs.get('job_correlation_data', None)
class MediaJobOutputScheduledEventData(MediaJobOutputStateChangeEventData):
"""Job output scheduled event data. Schema of the data property of an EventGridEvent for a Microsoft.Media.JobOutputScheduled event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
"Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
:vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
:param output: Gets the output.
:type output: ~event_grid_publisher_client.models.MediaJobOutput
:param job_correlation_data: Gets the Job correlation data.
:type job_correlation_data: dict[str, str]
"""
_validation = {
'previous_state': {'readonly': True},
}
_attribute_map = {
'previous_state': {'key': 'previousState', 'type': 'str'},
'output': {'key': 'output', 'type': 'MediaJobOutput'},
'job_correlation_data': {'key': 'jobCorrelationData', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(MediaJobOutputScheduledEventData, self).__init__(**kwargs)
class MediaJobProcessingEventData(MediaJobStateChangeEventData):
"""Job processing event data. Schema of the data property of an EventGridEvent for a Microsoft.Media.JobProcessing event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
"Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
:vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
:ivar state: The new state of the Job. Possible values include: "Canceled", "Canceling",
"Error", "Finished", "Processing", "Queued", "Scheduled".
:vartype state: str or ~event_grid_publisher_client.models.MediaJobState
:param correlation_data: Gets the Job correlation data.
:type correlation_data: dict[str, str]
"""
_validation = {
'previous_state': {'readonly': True},
'state': {'readonly': True},
}
_attribute_map = {
'previous_state': {'key': 'previousState', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'correlation_data': {'key': 'correlationData', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(MediaJobProcessingEventData, self).__init__(**kwargs)
class MediaJobScheduledEventData(MediaJobStateChangeEventData):
"""Job scheduled event data. Schema of the data property of an EventGridEvent for a Microsoft.Media.JobScheduled event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
"Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
:vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
:ivar state: The new state of the Job. Possible values include: "Canceled", "Canceling",
"Error", "Finished", "Processing", "Queued", "Scheduled".
:vartype state: str or ~event_grid_publisher_client.models.MediaJobState
:param correlation_data: Gets the Job correlation data.
:type correlation_data: dict[str, str]
"""
_validation = {
'previous_state': {'readonly': True},
'state': {'readonly': True},
}
_attribute_map = {
'previous_state': {'key': 'previousState', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'correlation_data': {'key': 'correlationData', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(MediaJobScheduledEventData, self).__init__(**kwargs)
class MediaLiveEventConnectionRejectedEventData(msrest.serialization.Model):
"""Encoder connection rejected event data. Schema of the data property of an EventGridEvent for a Microsoft.Media.LiveEventConnectionRejected event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar ingest_url: Gets the ingest URL provided by the live event.
:vartype ingest_url: str
:ivar stream_id: Gets the stream Id.
:vartype stream_id: str
:ivar encoder_ip: Gets the remote IP.
:vartype encoder_ip: str
:ivar encoder_port: Gets the remote port.
:vartype encoder_port: str
:ivar result_code: Gets the result code.
:vartype result_code: str
"""
_validation = {
'ingest_url': {'readonly': True},
'stream_id': {'readonly': True},
'encoder_ip': {'readonly': True},
'encoder_port': {'readonly': True},
'result_code': {'readonly': True},
}
_attribute_map = {
'ingest_url': {'key': 'ingestUrl', 'type': 'str'},
'stream_id': {'key': 'streamId', 'type': 'str'},
'encoder_ip': {'key': 'encoderIp', 'type': 'str'},
'encoder_port': {'key': 'encoderPort', 'type': 'str'},
'result_code': {'key': 'resultCode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MediaLiveEventConnectionRejectedEventData, self).__init__(**kwargs)
self.ingest_url = None
self.stream_id = None
self.encoder_ip = None
self.encoder_port = None
self.result_code = None
class MediaLiveEventEncoderConnectedEventData(msrest.serialization.Model):
"""Encoder connect event data. Schema of the data property of an EventGridEvent for a Microsoft.Media.LiveEventEncoderConnected event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar ingest_url: Gets the ingest URL provided by the live event.
:vartype ingest_url: str
:ivar stream_id: Gets the stream Id.
:vartype stream_id: str
:ivar encoder_ip: Gets the remote IP.
:vartype encoder_ip: str
:ivar encoder_port: Gets the remote port.
:vartype encoder_port: str
"""
_validation = {
'ingest_url': {'readonly': True},
'stream_id': {'readonly': True},
'encoder_ip': {'readonly': True},
'encoder_port': {'readonly': True},
}
_attribute_map = {
'ingest_url': {'key': 'ingestUrl', 'type': 'str'},
'stream_id': {'key': 'streamId', 'type': 'str'},
'encoder_ip': {'key': 'encoderIp', 'type': 'str'},
'encoder_port': {'key': 'encoderPort', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MediaLiveEventEncoderConnectedEventData, self).__init__(**kwargs)
self.ingest_url = None
self.stream_id = None
self.encoder_ip = None
self.encoder_port = None
class MediaLiveEventEncoderDisconnectedEventData(msrest.serialization.Model):
"""Encoder disconnected event data. Schema of the Data property of an EventGridEvent for a Microsoft.Media.LiveEventEncoderDisconnected event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar ingest_url: Gets the ingest URL provided by the live event.
:vartype ingest_url: str
:ivar stream_id: Gets the stream Id.
:vartype stream_id: str
:ivar encoder_ip: Gets the remote IP.
:vartype encoder_ip: str
:ivar encoder_port: Gets the remote port.
:vartype encoder_port: str
:ivar result_code: Gets the result code.
:vartype result_code: str
"""
_validation = {
'ingest_url': {'readonly': True},
'stream_id': {'readonly': True},
'encoder_ip': {'readonly': True},
'encoder_port': {'readonly': True},
'result_code': {'readonly': True},
}
_attribute_map = {
'ingest_url': {'key': 'ingestUrl', 'type': 'str'},
'stream_id': {'key': 'streamId', 'type': 'str'},
'encoder_ip': {'key': 'encoderIp', 'type': 'str'},
'encoder_port': {'key': 'encoderPort', 'type': 'str'},
'result_code': {'key': 'resultCode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MediaLiveEventEncoderDisconnectedEventData, self).__init__(**kwargs)
self.ingest_url = None
self.stream_id = None
self.encoder_ip = None
self.encoder_port = None
self.result_code = None
class MediaLiveEventIncomingDataChunkDroppedEventData(msrest.serialization.Model):
"""Ingest fragment dropped event data. Schema of the data property of an EventGridEvent for a Microsoft.Media.LiveEventIncomingDataChunkDropped event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar timestamp: Gets the timestamp of the data chunk dropped.
:vartype timestamp: str
:ivar track_type: Gets the type of the track (Audio / Video).
:vartype track_type: str
:ivar bitrate: Gets the bitrate of the track.
:vartype bitrate: long
:ivar timescale: Gets the timescale of the Timestamp.
:vartype timescale: str
:ivar result_code: Gets the result code for fragment drop operation.
:vartype result_code: str
:ivar track_name: Gets the name of the track for which fragment is dropped.
:vartype track_name: str
"""
_validation = {
'timestamp': {'readonly': True},
'track_type': {'readonly': True},
'bitrate': {'readonly': True},
'timescale': {'readonly': True},
'result_code': {'readonly': True},
'track_name': {'readonly': True},
}
_attribute_map = {
'timestamp': {'key': 'timestamp', 'type': 'str'},
'track_type': {'key': 'trackType', 'type': 'str'},
'bitrate': {'key': 'bitrate', 'type': 'long'},
'timescale': {'key': 'timescale', 'type': 'str'},
'result_code': {'key': 'resultCode', 'type': 'str'},
'track_name': {'key': 'trackName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MediaLiveEventIncomingDataChunkDroppedEventData, self).__init__(**kwargs)
self.timestamp = None
self.track_type = None
self.bitrate = None
self.timescale = None
self.result_code = None
self.track_name = None
class MediaLiveEventIncomingStreamReceivedEventData(msrest.serialization.Model):
"""Encoder connect event data. Schema of the data property of an EventGridEvent for a Microsoft.Media.LiveEventIncomingStreamReceived event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar ingest_url: Gets the ingest URL provided by the live event.
:vartype ingest_url: str
:ivar track_type: Gets the type of the track (Audio / Video).
:vartype track_type: str
:ivar track_name: Gets the track name.
:vartype track_name: str
:ivar bitrate: Gets the bitrate of the track.
:vartype bitrate: long
:ivar encoder_ip: Gets the remote IP.
:vartype encoder_ip: str
:ivar encoder_port: Gets the remote port.
:vartype encoder_port: str
:ivar timestamp: Gets the first timestamp of the data chunk received.
:vartype timestamp: str
:ivar duration: Gets the duration of the first data chunk.
:vartype duration: str
:ivar timescale: Gets the timescale in which timestamp is represented.
:vartype timescale: str
"""
_validation = {
'ingest_url': {'readonly': True},
'track_type': {'readonly': True},
'track_name': {'readonly': True},
'bitrate': {'readonly': True},
'encoder_ip': {'readonly': True},
'encoder_port': {'readonly': True},
'timestamp': {'readonly': True},
'duration': {'readonly': True},
'timescale': {'readonly': True},
}
_attribute_map = {
'ingest_url': {'key': 'ingestUrl', 'type': 'str'},
'track_type': {'key': 'trackType', 'type': 'str'},
'track_name': {'key': 'trackName', 'type': 'str'},
'bitrate': {'key': 'bitrate', 'type': 'long'},
'encoder_ip': {'key': 'encoderIp', 'type': 'str'},
'encoder_port': {'key': 'encoderPort', 'type': 'str'},
'timestamp': {'key': 'timestamp', 'type': 'str'},
'duration': {'key': 'duration', 'type': 'str'},
'timescale': {'key': 'timescale', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MediaLiveEventIncomingStreamReceivedEventData, self).__init__(**kwargs)
self.ingest_url = None
self.track_type = None
self.track_name = None
self.bitrate = None
self.encoder_ip = None
self.encoder_port = None
self.timestamp = None
self.duration = None
self.timescale = None
class MediaLiveEventIncomingStreamsOutOfSyncEventData(msrest.serialization.Model):
"""Incoming streams out of sync event data. Schema of the data property of an EventGridEvent for a Microsoft.Media.LiveEventIncomingStreamsOutOfSync event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar min_last_timestamp: Gets the minimum last timestamp received.
:vartype min_last_timestamp: str
:ivar type_of_stream_with_min_last_timestamp: Gets the type of stream with minimum last
timestamp.
:vartype type_of_stream_with_min_last_timestamp: str
:ivar max_last_timestamp: Gets the maximum timestamp among all the tracks (audio or video).
:vartype max_last_timestamp: str
:ivar type_of_stream_with_max_last_timestamp: Gets the type of stream with maximum last
timestamp.
:vartype type_of_stream_with_max_last_timestamp: str
:ivar timescale_of_min_last_timestamp: Gets the timescale in which "MinLastTimestamp" is
represented.
:vartype timescale_of_min_last_timestamp: str
:ivar timescale_of_max_last_timestamp: Gets the timescale in which "MaxLastTimestamp" is
represented.
:vartype timescale_of_max_last_timestamp: str
"""
_validation = {
'min_last_timestamp': {'readonly': True},
'type_of_stream_with_min_last_timestamp': {'readonly': True},
'max_last_timestamp': {'readonly': True},
'type_of_stream_with_max_last_timestamp': {'readonly': True},
'timescale_of_min_last_timestamp': {'readonly': True},
'timescale_of_max_last_timestamp': {'readonly': True},
}
_attribute_map = {
'min_last_timestamp': {'key': 'minLastTimestamp', 'type': 'str'},
'type_of_stream_with_min_last_timestamp': {'key': 'typeOfStreamWithMinLastTimestamp', 'type': 'str'},
'max_last_timestamp': {'key': 'maxLastTimestamp', 'type': 'str'},
'type_of_stream_with_max_last_timestamp': {'key': 'typeOfStreamWithMaxLastTimestamp', 'type': 'str'},
'timescale_of_min_last_timestamp': {'key': 'timescaleOfMinLastTimestamp', 'type': 'str'},
'timescale_of_max_last_timestamp': {'key': 'timescaleOfMaxLastTimestamp', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MediaLiveEventIncomingStreamsOutOfSyncEventData, self).__init__(**kwargs)
self.min_last_timestamp = None
self.type_of_stream_with_min_last_timestamp = None
self.max_last_timestamp = None
self.type_of_stream_with_max_last_timestamp = None
self.timescale_of_min_last_timestamp = None
self.timescale_of_max_last_timestamp = None
class MediaLiveEventIncomingVideoStreamsOutOfSyncEventData(msrest.serialization.Model):
"""Incoming video stream out of synch event data. Schema of the data property of an EventGridEvent for a Microsoft.Media.LiveEventIncomingVideoStreamsOutOfSync event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar first_timestamp: Gets the first timestamp received for one of the quality levels.
:vartype first_timestamp: str
:ivar first_duration: Gets the duration of the data chunk with first timestamp.
:vartype first_duration: str
:ivar second_timestamp: Gets the timestamp received for some other quality levels.
:vartype second_timestamp: str
:ivar second_duration: Gets the duration of the data chunk with second timestamp.
:vartype second_duration: str
:ivar timescale: Gets the timescale in which both the timestamps and durations are represented.
:vartype timescale: str
"""
_validation = {
'first_timestamp': {'readonly': True},
'first_duration': {'readonly': True},
'second_timestamp': {'readonly': True},
'second_duration': {'readonly': True},
'timescale': {'readonly': True},
}
_attribute_map = {
'first_timestamp': {'key': 'firstTimestamp', 'type': 'str'},
'first_duration': {'key': 'firstDuration', 'type': 'str'},
'second_timestamp': {'key': 'secondTimestamp', 'type': 'str'},
'second_duration': {'key': 'secondDuration', 'type': 'str'},
'timescale': {'key': 'timescale', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MediaLiveEventIncomingVideoStreamsOutOfSyncEventData, self).__init__(**kwargs)
self.first_timestamp = None
self.first_duration = None
self.second_timestamp = None
self.second_duration = None
self.timescale = None
class MediaLiveEventIngestHeartbeatEventData(msrest.serialization.Model):
"""Ingest fragment dropped event data. Schema of the data property of an EventGridEvent for a Microsoft.Media.LiveEventIngestHeartbeat event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar track_type: Gets the type of the track (Audio / Video).
:vartype track_type: str
:ivar track_name: Gets the track name.
:vartype track_name: str
:ivar bitrate: Gets the bitrate of the track.
:vartype bitrate: long
:ivar incoming_bitrate: Gets the incoming bitrate.
:vartype incoming_bitrate: long
:ivar last_timestamp: Gets the last timestamp.
:vartype last_timestamp: str
:ivar timescale: Gets the timescale of the last timestamp.
:vartype timescale: str
:ivar overlap_count: Gets the fragment overlap count.
:vartype overlap_count: long
:ivar discontinuity_count: Gets the fragment discontinuity count.
:vartype discontinuity_count: long
:ivar nonincreasing_count: Gets the non-increasing count.
:vartype nonincreasing_count: long
:ivar unexpected_bitrate: Gets a value indicating whether an unexpected bitrate is present.
:vartype unexpected_bitrate: bool
:ivar state: Gets the state of the live event.
:vartype state: str
:ivar healthy: Gets a value indicating whether the preview is healthy.
:vartype healthy: bool
"""
_validation = {
'track_type': {'readonly': True},
'track_name': {'readonly': True},
'bitrate': {'readonly': True},
'incoming_bitrate': {'readonly': True},
'last_timestamp': {'readonly': True},
'timescale': {'readonly': True},
'overlap_count': {'readonly': True},
'discontinuity_count': {'readonly': True},
'nonincreasing_count': {'readonly': True},
'unexpected_bitrate': {'readonly': True},
'state': {'readonly': True},
'healthy': {'readonly': True},
}
_attribute_map = {
'track_type': {'key': 'trackType', 'type': 'str'},
'track_name': {'key': 'trackName', 'type': 'str'},
'bitrate': {'key': 'bitrate', 'type': 'long'},
'incoming_bitrate': {'key': 'incomingBitrate', 'type': 'long'},
'last_timestamp': {'key': 'lastTimestamp', 'type': 'str'},
'timescale': {'key': 'timescale', 'type': 'str'},
'overlap_count': {'key': 'overlapCount', 'type': 'long'},
'discontinuity_count': {'key': 'discontinuityCount', 'type': 'long'},
'nonincreasing_count': {'key': 'nonincreasingCount', 'type': 'long'},
'unexpected_bitrate': {'key': 'unexpectedBitrate', 'type': 'bool'},
'state': {'key': 'state', 'type': 'str'},
'healthy': {'key': 'healthy', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(MediaLiveEventIngestHeartbeatEventData, self).__init__(**kwargs)
self.track_type = None
self.track_name = None
self.bitrate = None
self.incoming_bitrate = None
self.last_timestamp = None
self.timescale = None
self.overlap_count = None
self.discontinuity_count = None
self.nonincreasing_count = None
self.unexpected_bitrate = None
self.state = None
self.healthy = None
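# Usage sketch (editorial addition, not generated code): a hypothetical
# health check over a deserialized heartbeat payload; all values below are
# illustrative only.
def _example_ingest_heartbeat_is_clean():
    heartbeat = MediaLiveEventIngestHeartbeatEventData.deserialize({
        'trackType': 'video',
        'trackName': 'video',
        'bitrate': 2500000,
        'incomingBitrate': 2462597,
        'lastTimestamp': '106999',
        'timescale': '1000',
        'overlapCount': 0,
        'discontinuityCount': 0,
        'nonincreasingCount': 0,
        'unexpectedBitrate': False,
        'state': 'Running',
        'healthy': True,
    })
    return heartbeat.healthy and heartbeat.discontinuity_count == 0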
class MediaLiveEventTrackDiscontinuityDetectedEventData(msrest.serialization.Model):
"""Ingest track discontinuity detected event data. Schema of the data property of an EventGridEvent for a Microsoft.Media.LiveEventTrackDiscontinuityDetected event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar track_type: Gets the type of the track (Audio / Video).
:vartype track_type: str
:ivar track_name: Gets the track name.
:vartype track_name: str
:ivar bitrate: Gets the bitrate.
:vartype bitrate: long
:ivar previous_timestamp: Gets the timestamp of the previous fragment.
:vartype previous_timestamp: str
:ivar new_timestamp: Gets the timestamp of the current fragment.
:vartype new_timestamp: str
:ivar timescale: Gets the timescale in which both timestamps and discontinuity gap are
represented.
:vartype timescale: str
:ivar discontinuity_gap: Gets the discontinuity gap between PreviousTimestamp and NewTimestamp.
:vartype discontinuity_gap: str
"""
_validation = {
'track_type': {'readonly': True},
'track_name': {'readonly': True},
'bitrate': {'readonly': True},
'previous_timestamp': {'readonly': True},
'new_timestamp': {'readonly': True},
'timescale': {'readonly': True},
'discontinuity_gap': {'readonly': True},
}
_attribute_map = {
'track_type': {'key': 'trackType', 'type': 'str'},
'track_name': {'key': 'trackName', 'type': 'str'},
'bitrate': {'key': 'bitrate', 'type': 'long'},
'previous_timestamp': {'key': 'previousTimestamp', 'type': 'str'},
'new_timestamp': {'key': 'newTimestamp', 'type': 'str'},
'timescale': {'key': 'timescale', 'type': 'str'},
'discontinuity_gap': {'key': 'discontinuityGap', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MediaLiveEventTrackDiscontinuityDetectedEventData, self).__init__(**kwargs)
self.track_type = None
self.track_name = None
self.bitrate = None
self.previous_timestamp = None
self.new_timestamp = None
self.timescale = None
self.discontinuity_gap = None
class MicrosoftTeamsUserIdentifierModel(msrest.serialization.Model):
"""A Microsoft Teams user.
All required parameters must be populated in order to send to Azure.
:param user_id: Required. The Id of the Microsoft Teams user. If not anonymous, this is the AAD
object Id of the user.
:type user_id: str
:param is_anonymous: True if the Microsoft Teams user is anonymous. Defaults to false if
missing.
:type is_anonymous: bool
:param cloud: The cloud that the Microsoft Teams user belongs to. Defaults to 'public' if
missing. Possible values include: "public", "dod", "gcch".
:type cloud: str or ~event_grid_publisher_client.models.CommunicationCloudEnvironmentModel
"""
_validation = {
'user_id': {'required': True},
}
_attribute_map = {
'user_id': {'key': 'userId', 'type': 'str'},
'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
'cloud': {'key': 'cloud', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MicrosoftTeamsUserIdentifierModel, self).__init__(**kwargs)
self.user_id = kwargs['user_id']
self.is_anonymous = kwargs.get('is_anonymous', None)
self.cloud = kwargs.get('cloud', None)
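# Usage sketch (editorial addition, not generated code): user_id is the only
# required parameter; the generated constructor raises KeyError when it is
# missing. The GUID below is a placeholder AAD object Id.
def _example_teams_user_identifier():
    user = MicrosoftTeamsUserIdentifierModel(
        user_id='00000000-0000-0000-0000-000000000000',
        is_anonymous=False,
        cloud='public',
    )
    # serialize() emits the wire-format keys from _attribute_map, e.g.
    # {'userId': ..., 'isAnonymous': False, 'cloud': 'public'}.
    return user.serialize()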
class PhoneNumberIdentifierModel(msrest.serialization.Model):
"""A phone number.
All required parameters must be populated in order to send to Azure.
:param value: Required. The phone number in E.164 format.
:type value: str
"""
_validation = {
'value': {'required': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PhoneNumberIdentifierModel, self).__init__(**kwargs)
self.value = kwargs['value']
class RedisExportRDBCompletedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Cache.ExportRDBCompleted event.
:param timestamp: The time at which the event occurred.
:type timestamp: ~datetime.datetime
:param name: The name of this event.
:type name: str
:param status: The status of this event. Failed or succeeded.
:type status: str
"""
_attribute_map = {
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(RedisExportRDBCompletedEventData, self).__init__(**kwargs)
self.timestamp = kwargs.get('timestamp', None)
self.name = kwargs.get('name', None)
self.status = kwargs.get('status', None)
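# Usage sketch (editorial addition, not generated code): the 'iso-8601' entry
# in _attribute_map means deserialization converts the wire string into a
# datetime.datetime. Values below are hypothetical.
def _example_redis_export_completed():
    event = RedisExportRDBCompletedEventData.deserialize({
        'timestamp': '2020-12-09T00:00:00Z',
        'name': 'ExportRDBCompleted',
        'status': 'Succeeded',
    })
    return event.timestamp.year  # timestamp is now a datetime, not a str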
class RedisImportRDBCompletedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Cache.ImportRDBCompleted event.
:param timestamp: The time at which the event occurred.
:type timestamp: ~datetime.datetime
:param name: The name of this event.
:type name: str
:param status: The status of this event. Failed or succeeded.
:type status: str
"""
_attribute_map = {
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(RedisImportRDBCompletedEventData, self).__init__(**kwargs)
self.timestamp = kwargs.get('timestamp', None)
self.name = kwargs.get('name', None)
self.status = kwargs.get('status', None)
class RedisPatchingCompletedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Cache.PatchingCompleted event.
:param timestamp: The time at which the event occurred.
:type timestamp: ~datetime.datetime
:param name: The name of this event.
:type name: str
:param status: The status of this event. Failed or succeeded.
:type status: str
"""
_attribute_map = {
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(RedisPatchingCompletedEventData, self).__init__(**kwargs)
self.timestamp = kwargs.get('timestamp', None)
self.name = kwargs.get('name', None)
self.status = kwargs.get('status', None)
class RedisScalingCompletedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Cache.ScalingCompleted event.
:param timestamp: The time at which the event occurred.
:type timestamp: ~datetime.datetime
:param name: The name of this event.
:type name: str
:param status: The status of this event. Failed or succeeded.
:type status: str
"""
_attribute_map = {
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(RedisScalingCompletedEventData, self).__init__(**kwargs)
self.timestamp = kwargs.get('timestamp', None)
self.name = kwargs.get('name', None)
self.status = kwargs.get('status', None)
class ResourceActionCancelData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Resources.ResourceActionCancel event. This is raised when a resource action operation is canceled.
:param tenant_id: The tenant ID of the resource.
:type tenant_id: str
:param subscription_id: The subscription ID of the resource.
:type subscription_id: str
:param resource_group: The resource group of the resource.
:type resource_group: str
:param resource_provider: The resource provider performing the operation.
:type resource_provider: str
:param resource_uri: The URI of the resource in the operation.
:type resource_uri: str
:param operation_name: The operation that was performed.
:type operation_name: str
:param status: The status of the operation.
:type status: str
:param authorization: The requested authorization for the operation.
:type authorization: str
:param claims: The properties of the claims.
:type claims: str
:param correlation_id: An operation ID used for troubleshooting.
:type correlation_id: str
:param http_request: The details of the operation.
:type http_request: str
"""
_attribute_map = {
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
'resource_group': {'key': 'resourceGroup', 'type': 'str'},
'resource_provider': {'key': 'resourceProvider', 'type': 'str'},
'resource_uri': {'key': 'resourceUri', 'type': 'str'},
'operation_name': {'key': 'operationName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'authorization': {'key': 'authorization', 'type': 'str'},
'claims': {'key': 'claims', 'type': 'str'},
'correlation_id': {'key': 'correlationId', 'type': 'str'},
'http_request': {'key': 'httpRequest', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ResourceActionCancelData, self).__init__(**kwargs)
self.tenant_id = kwargs.get('tenant_id', None)
self.subscription_id = kwargs.get('subscription_id', None)
self.resource_group = kwargs.get('resource_group', None)
self.resource_provider = kwargs.get('resource_provider', None)
self.resource_uri = kwargs.get('resource_uri', None)
self.operation_name = kwargs.get('operation_name', None)
self.status = kwargs.get('status', None)
self.authorization = kwargs.get('authorization', None)
self.claims = kwargs.get('claims', None)
self.correlation_id = kwargs.get('correlation_id', None)
self.http_request = kwargs.get('http_request', None)
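# Usage sketch (editorial addition, not generated code): the Resource*Data
# models in this module all share this same eleven-field shape; only the
# event type they document differs. A minimal example with hypothetical
# values; fields left unset are simply omitted from the serialized output.
def _example_resource_action_cancel():
    event = ResourceActionCancelData(
        tenant_id='00000000-0000-0000-0000-000000000000',
        subscription_id='11111111-1111-1111-1111-111111111111',
        resource_group='my-resource-group',
        operation_name='Microsoft.Compute/virtualMachines/write',
        status='Canceled',
    )
    return event.serialize()  # only the populated keys are emitted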
class ResourceActionFailureData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Resources.ResourceActionFailure event. This is raised when a resource action operation fails.
:param tenant_id: The tenant ID of the resource.
:type tenant_id: str
:param subscription_id: The subscription ID of the resource.
:type subscription_id: str
:param resource_group: The resource group of the resource.
:type resource_group: str
:param resource_provider: The resource provider performing the operation.
:type resource_provider: str
:param resource_uri: The URI of the resource in the operation.
:type resource_uri: str
:param operation_name: The operation that was performed.
:type operation_name: str
:param status: The status of the operation.
:type status: str
:param authorization: The requested authorization for the operation.
:type authorization: str
:param claims: The properties of the claims.
:type claims: str
:param correlation_id: An operation ID used for troubleshooting.
:type correlation_id: str
:param http_request: The details of the operation.
:type http_request: str
"""
_attribute_map = {
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
'resource_group': {'key': 'resourceGroup', 'type': 'str'},
'resource_provider': {'key': 'resourceProvider', 'type': 'str'},
'resource_uri': {'key': 'resourceUri', 'type': 'str'},
'operation_name': {'key': 'operationName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'authorization': {'key': 'authorization', 'type': 'str'},
'claims': {'key': 'claims', 'type': 'str'},
'correlation_id': {'key': 'correlationId', 'type': 'str'},
'http_request': {'key': 'httpRequest', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ResourceActionFailureData, self).__init__(**kwargs)
self.tenant_id = kwargs.get('tenant_id', None)
self.subscription_id = kwargs.get('subscription_id', None)
self.resource_group = kwargs.get('resource_group', None)
self.resource_provider = kwargs.get('resource_provider', None)
self.resource_uri = kwargs.get('resource_uri', None)
self.operation_name = kwargs.get('operation_name', None)
self.status = kwargs.get('status', None)
self.authorization = kwargs.get('authorization', None)
self.claims = kwargs.get('claims', None)
self.correlation_id = kwargs.get('correlation_id', None)
self.http_request = kwargs.get('http_request', None)
class ResourceActionSuccessData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Resources.ResourceActionSuccess event. This is raised when a resource action operation succeeds.
:param tenant_id: The tenant ID of the resource.
:type tenant_id: str
:param subscription_id: The subscription ID of the resource.
:type subscription_id: str
:param resource_group: The resource group of the resource.
:type resource_group: str
:param resource_provider: The resource provider performing the operation.
:type resource_provider: str
:param resource_uri: The URI of the resource in the operation.
:type resource_uri: str
:param operation_name: The operation that was performed.
:type operation_name: str
:param status: The status of the operation.
:type status: str
:param authorization: The requested authorization for the operation.
:type authorization: str
:param claims: The properties of the claims.
:type claims: str
:param correlation_id: An operation ID used for troubleshooting.
:type correlation_id: str
:param http_request: The details of the operation.
:type http_request: str
"""
_attribute_map = {
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
'resource_group': {'key': 'resourceGroup', 'type': 'str'},
'resource_provider': {'key': 'resourceProvider', 'type': 'str'},
'resource_uri': {'key': 'resourceUri', 'type': 'str'},
'operation_name': {'key': 'operationName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'authorization': {'key': 'authorization', 'type': 'str'},
'claims': {'key': 'claims', 'type': 'str'},
'correlation_id': {'key': 'correlationId', 'type': 'str'},
'http_request': {'key': 'httpRequest', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ResourceActionSuccessData, self).__init__(**kwargs)
self.tenant_id = kwargs.get('tenant_id', None)
self.subscription_id = kwargs.get('subscription_id', None)
self.resource_group = kwargs.get('resource_group', None)
self.resource_provider = kwargs.get('resource_provider', None)
self.resource_uri = kwargs.get('resource_uri', None)
self.operation_name = kwargs.get('operation_name', None)
self.status = kwargs.get('status', None)
self.authorization = kwargs.get('authorization', None)
self.claims = kwargs.get('claims', None)
self.correlation_id = kwargs.get('correlation_id', None)
self.http_request = kwargs.get('http_request', None)
class ResourceDeleteCancelData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Resources.ResourceDeleteCancel event. This is raised when a resource delete operation is canceled.
:param tenant_id: The tenant ID of the resource.
:type tenant_id: str
:param subscription_id: The subscription ID of the resource.
:type subscription_id: str
:param resource_group: The resource group of the resource.
:type resource_group: str
:param resource_provider: The resource provider performing the operation.
:type resource_provider: str
:param resource_uri: The URI of the resource in the operation.
:type resource_uri: str
:param operation_name: The operation that was performed.
:type operation_name: str
:param status: The status of the operation.
:type status: str
:param authorization: The requested authorization for the operation.
:type authorization: str
:param claims: The properties of the claims.
:type claims: str
:param correlation_id: An operation ID used for troubleshooting.
:type correlation_id: str
:param http_request: The details of the operation.
:type http_request: str
"""
_attribute_map = {
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
'resource_group': {'key': 'resourceGroup', 'type': 'str'},
'resource_provider': {'key': 'resourceProvider', 'type': 'str'},
'resource_uri': {'key': 'resourceUri', 'type': 'str'},
'operation_name': {'key': 'operationName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'authorization': {'key': 'authorization', 'type': 'str'},
'claims': {'key': 'claims', 'type': 'str'},
'correlation_id': {'key': 'correlationId', 'type': 'str'},
'http_request': {'key': 'httpRequest', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ResourceDeleteCancelData, self).__init__(**kwargs)
self.tenant_id = kwargs.get('tenant_id', None)
self.subscription_id = kwargs.get('subscription_id', None)
self.resource_group = kwargs.get('resource_group', None)
self.resource_provider = kwargs.get('resource_provider', None)
self.resource_uri = kwargs.get('resource_uri', None)
self.operation_name = kwargs.get('operation_name', None)
self.status = kwargs.get('status', None)
self.authorization = kwargs.get('authorization', None)
self.claims = kwargs.get('claims', None)
self.correlation_id = kwargs.get('correlation_id', None)
self.http_request = kwargs.get('http_request', None)
class ResourceDeleteFailureData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Resources.ResourceDeleteFailure event. This is raised when a resource delete operation fails.
:param tenant_id: The tenant ID of the resource.
:type tenant_id: str
:param subscription_id: The subscription ID of the resource.
:type subscription_id: str
:param resource_group: The resource group of the resource.
:type resource_group: str
:param resource_provider: The resource provider performing the operation.
:type resource_provider: str
:param resource_uri: The URI of the resource in the operation.
:type resource_uri: str
:param operation_name: The operation that was performed.
:type operation_name: str
:param status: The status of the operation.
:type status: str
:param authorization: The requested authorization for the operation.
:type authorization: str
:param claims: The properties of the claims.
:type claims: str
:param correlation_id: An operation ID used for troubleshooting.
:type correlation_id: str
:param http_request: The details of the operation.
:type http_request: str
"""
_attribute_map = {
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
'resource_group': {'key': 'resourceGroup', 'type': 'str'},
'resource_provider': {'key': 'resourceProvider', 'type': 'str'},
'resource_uri': {'key': 'resourceUri', 'type': 'str'},
'operation_name': {'key': 'operationName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'authorization': {'key': 'authorization', 'type': 'str'},
'claims': {'key': 'claims', 'type': 'str'},
'correlation_id': {'key': 'correlationId', 'type': 'str'},
'http_request': {'key': 'httpRequest', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ResourceDeleteFailureData, self).__init__(**kwargs)
self.tenant_id = kwargs.get('tenant_id', None)
self.subscription_id = kwargs.get('subscription_id', None)
self.resource_group = kwargs.get('resource_group', None)
self.resource_provider = kwargs.get('resource_provider', None)
self.resource_uri = kwargs.get('resource_uri', None)
self.operation_name = kwargs.get('operation_name', None)
self.status = kwargs.get('status', None)
self.authorization = kwargs.get('authorization', None)
self.claims = kwargs.get('claims', None)
self.correlation_id = kwargs.get('correlation_id', None)
self.http_request = kwargs.get('http_request', None)
class ResourceDeleteSuccessData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Resources.ResourceDeleteSuccess event. This is raised when a resource delete operation succeeds.
:param tenant_id: The tenant ID of the resource.
:type tenant_id: str
:param subscription_id: The subscription ID of the resource.
:type subscription_id: str
:param resource_group: The resource group of the resource.
:type resource_group: str
:param resource_provider: The resource provider performing the operation.
:type resource_provider: str
:param resource_uri: The URI of the resource in the operation.
:type resource_uri: str
:param operation_name: The operation that was performed.
:type operation_name: str
:param status: The status of the operation.
:type status: str
:param authorization: The requested authorization for the operation.
:type authorization: str
:param claims: The properties of the claims.
:type claims: str
:param correlation_id: An operation ID used for troubleshooting.
:type correlation_id: str
:param http_request: The details of the operation.
:type http_request: str
"""
_attribute_map = {
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
'resource_group': {'key': 'resourceGroup', 'type': 'str'},
'resource_provider': {'key': 'resourceProvider', 'type': 'str'},
'resource_uri': {'key': 'resourceUri', 'type': 'str'},
'operation_name': {'key': 'operationName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'authorization': {'key': 'authorization', 'type': 'str'},
'claims': {'key': 'claims', 'type': 'str'},
'correlation_id': {'key': 'correlationId', 'type': 'str'},
'http_request': {'key': 'httpRequest', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ResourceDeleteSuccessData, self).__init__(**kwargs)
self.tenant_id = kwargs.get('tenant_id', None)
self.subscription_id = kwargs.get('subscription_id', None)
self.resource_group = kwargs.get('resource_group', None)
self.resource_provider = kwargs.get('resource_provider', None)
self.resource_uri = kwargs.get('resource_uri', None)
self.operation_name = kwargs.get('operation_name', None)
self.status = kwargs.get('status', None)
self.authorization = kwargs.get('authorization', None)
self.claims = kwargs.get('claims', None)
self.correlation_id = kwargs.get('correlation_id', None)
self.http_request = kwargs.get('http_request', None)
class ResourceWriteCancelData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Resources.ResourceWriteCancel event. This is raised when a resource create or update operation is canceled.
:param tenant_id: The tenant ID of the resource.
:type tenant_id: str
:param subscription_id: The subscription ID of the resource.
:type subscription_id: str
:param resource_group: The resource group of the resource.
:type resource_group: str
:param resource_provider: The resource provider performing the operation.
:type resource_provider: str
:param resource_uri: The URI of the resource in the operation.
:type resource_uri: str
:param operation_name: The operation that was performed.
:type operation_name: str
:param status: The status of the operation.
:type status: str
:param authorization: The requested authorization for the operation.
:type authorization: str
:param claims: The properties of the claims.
:type claims: str
:param correlation_id: An operation ID used for troubleshooting.
:type correlation_id: str
:param http_request: The details of the operation.
:type http_request: str
"""
_attribute_map = {
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
'resource_group': {'key': 'resourceGroup', 'type': 'str'},
'resource_provider': {'key': 'resourceProvider', 'type': 'str'},
'resource_uri': {'key': 'resourceUri', 'type': 'str'},
'operation_name': {'key': 'operationName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'authorization': {'key': 'authorization', 'type': 'str'},
'claims': {'key': 'claims', 'type': 'str'},
'correlation_id': {'key': 'correlationId', 'type': 'str'},
'http_request': {'key': 'httpRequest', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ResourceWriteCancelData, self).__init__(**kwargs)
self.tenant_id = kwargs.get('tenant_id', None)
self.subscription_id = kwargs.get('subscription_id', None)
self.resource_group = kwargs.get('resource_group', None)
self.resource_provider = kwargs.get('resource_provider', None)
self.resource_uri = kwargs.get('resource_uri', None)
self.operation_name = kwargs.get('operation_name', None)
self.status = kwargs.get('status', None)
self.authorization = kwargs.get('authorization', None)
self.claims = kwargs.get('claims', None)
self.correlation_id = kwargs.get('correlation_id', None)
self.http_request = kwargs.get('http_request', None)
class ResourceWriteFailureData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Resources.ResourceWriteFailure event. This is raised when a resource create or update operation fails.
:param tenant_id: The tenant ID of the resource.
:type tenant_id: str
:param subscription_id: The subscription ID of the resource.
:type subscription_id: str
:param resource_group: The resource group of the resource.
:type resource_group: str
:param resource_provider: The resource provider performing the operation.
:type resource_provider: str
:param resource_uri: The URI of the resource in the operation.
:type resource_uri: str
:param operation_name: The operation that was performed.
:type operation_name: str
:param status: The status of the operation.
:type status: str
:param authorization: The requested authorization for the operation.
:type authorization: str
:param claims: The properties of the claims.
:type claims: str
:param correlation_id: An operation ID used for troubleshooting.
:type correlation_id: str
:param http_request: The details of the operation.
:type http_request: str
"""
_attribute_map = {
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
'resource_group': {'key': 'resourceGroup', 'type': 'str'},
'resource_provider': {'key': 'resourceProvider', 'type': 'str'},
'resource_uri': {'key': 'resourceUri', 'type': 'str'},
'operation_name': {'key': 'operationName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'authorization': {'key': 'authorization', 'type': 'str'},
'claims': {'key': 'claims', 'type': 'str'},
'correlation_id': {'key': 'correlationId', 'type': 'str'},
'http_request': {'key': 'httpRequest', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ResourceWriteFailureData, self).__init__(**kwargs)
self.tenant_id = kwargs.get('tenant_id', None)
self.subscription_id = kwargs.get('subscription_id', None)
self.resource_group = kwargs.get('resource_group', None)
self.resource_provider = kwargs.get('resource_provider', None)
self.resource_uri = kwargs.get('resource_uri', None)
self.operation_name = kwargs.get('operation_name', None)
self.status = kwargs.get('status', None)
self.authorization = kwargs.get('authorization', None)
self.claims = kwargs.get('claims', None)
self.correlation_id = kwargs.get('correlation_id', None)
self.http_request = kwargs.get('http_request', None)
class ResourceWriteSuccessData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Resources.ResourceWriteSuccess event. This is raised when a resource create or update operation succeeds.
:param tenant_id: The tenant ID of the resource.
:type tenant_id: str
:param subscription_id: The subscription ID of the resource.
:type subscription_id: str
:param resource_group: The resource group of the resource.
:type resource_group: str
:param resource_provider: The resource provider performing the operation.
:type resource_provider: str
:param resource_uri: The URI of the resource in the operation.
:type resource_uri: str
:param operation_name: The operation that was performed.
:type operation_name: str
:param status: The status of the operation.
:type status: str
:param authorization: The requested authorization for the operation.
:type authorization: str
:param claims: The properties of the claims.
:type claims: str
:param correlation_id: An operation ID used for troubleshooting.
:type correlation_id: str
:param http_request: The details of the operation.
:type http_request: str
"""
_attribute_map = {
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
'resource_group': {'key': 'resourceGroup', 'type': 'str'},
'resource_provider': {'key': 'resourceProvider', 'type': 'str'},
'resource_uri': {'key': 'resourceUri', 'type': 'str'},
'operation_name': {'key': 'operationName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'authorization': {'key': 'authorization', 'type': 'str'},
'claims': {'key': 'claims', 'type': 'str'},
'correlation_id': {'key': 'correlationId', 'type': 'str'},
'http_request': {'key': 'httpRequest', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ResourceWriteSuccessData, self).__init__(**kwargs)
self.tenant_id = kwargs.get('tenant_id', None)
self.subscription_id = kwargs.get('subscription_id', None)
self.resource_group = kwargs.get('resource_group', None)
self.resource_provider = kwargs.get('resource_provider', None)
self.resource_uri = kwargs.get('resource_uri', None)
self.operation_name = kwargs.get('operation_name', None)
self.status = kwargs.get('status', None)
self.authorization = kwargs.get('authorization', None)
self.claims = kwargs.get('claims', None)
self.correlation_id = kwargs.get('correlation_id', None)
self.http_request = kwargs.get('http_request', None)
class ServiceBusActiveMessagesAvailablePeriodicNotificationsEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.ServiceBus.ActiveMessagesAvailablePeriodicNotifications event.
:param namespace_name: The namespace name of the Microsoft.ServiceBus resource.
:type namespace_name: str
:param request_uri: The endpoint of the Microsoft.ServiceBus resource.
:type request_uri: str
:param entity_type: The entity type of the Microsoft.ServiceBus resource. Could be one of
'queue' or 'subscriber'.
:type entity_type: str
:param queue_name: The name of the Microsoft.ServiceBus queue. If the entity type is
'subscriber', this value will be null.
:type queue_name: str
:param topic_name: The name of the Microsoft.ServiceBus topic. If the entity type is
'queue', this value will be null.
:type topic_name: str
:param subscription_name: The name of the Microsoft.ServiceBus topic's subscription. If the
entity type is 'queue', this value will be null.
:type subscription_name: str
"""
_attribute_map = {
'namespace_name': {'key': 'namespaceName', 'type': 'str'},
'request_uri': {'key': 'requestUri', 'type': 'str'},
'entity_type': {'key': 'entityType', 'type': 'str'},
'queue_name': {'key': 'queueName', 'type': 'str'},
'topic_name': {'key': 'topicName', 'type': 'str'},
'subscription_name': {'key': 'subscriptionName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ServiceBusActiveMessagesAvailablePeriodicNotificationsEventData, self).__init__(**kwargs)
self.namespace_name = kwargs.get('namespace_name', None)
self.request_uri = kwargs.get('request_uri', None)
self.entity_type = kwargs.get('entity_type', None)
self.queue_name = kwargs.get('queue_name', None)
self.topic_name = kwargs.get('topic_name', None)
self.subscription_name = kwargs.get('subscription_name', None)
class ServiceBusActiveMessagesAvailableWithNoListenersEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.ServiceBus.ActiveMessagesAvailableWithNoListeners event.
:param namespace_name: The namespace name of the Microsoft.ServiceBus resource.
:type namespace_name: str
:param request_uri: The endpoint of the Microsoft.ServiceBus resource.
:type request_uri: str
:param entity_type: The entity type of the Microsoft.ServiceBus resource. Could be one of
'queue' or 'subscriber'.
:type entity_type: str
:param queue_name: The name of the Microsoft.ServiceBus queue. If the entity type is
'subscriber', this value will be null.
:type queue_name: str
:param topic_name: The name of the Microsoft.ServiceBus topic. If the entity type is
'queue', this value will be null.
:type topic_name: str
:param subscription_name: The name of the Microsoft.ServiceBus topic's subscription. If the
entity type is 'queue', this value will be null.
:type subscription_name: str
"""
_attribute_map = {
'namespace_name': {'key': 'namespaceName', 'type': 'str'},
'request_uri': {'key': 'requestUri', 'type': 'str'},
'entity_type': {'key': 'entityType', 'type': 'str'},
'queue_name': {'key': 'queueName', 'type': 'str'},
'topic_name': {'key': 'topicName', 'type': 'str'},
'subscription_name': {'key': 'subscriptionName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ServiceBusActiveMessagesAvailableWithNoListenersEventData, self).__init__(**kwargs)
self.namespace_name = kwargs.get('namespace_name', None)
self.request_uri = kwargs.get('request_uri', None)
self.entity_type = kwargs.get('entity_type', None)
self.queue_name = kwargs.get('queue_name', None)
self.topic_name = kwargs.get('topic_name', None)
self.subscription_name = kwargs.get('subscription_name', None)
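# Usage sketch (editorial addition, not generated code): per the docstrings
# above, queue_name is populated for 'queue' entities while topic_name and
# subscription_name stay null, and vice versa for 'subscriber' entities.
# All names below are hypothetical.
def _example_service_bus_queue_event():
    event = ServiceBusActiveMessagesAvailableWithNoListenersEventData(
        namespace_name='my-sb-namespace',
        request_uri='https://my-sb-namespace.servicebus.windows.net/my-queue',
        entity_type='queue',
        queue_name='my-queue',
        # topic_name and subscription_name remain None for queue entities
    )
    return event.serialize()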
class ServiceBusDeadletterMessagesAvailablePeriodicNotificationsEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.ServiceBus.DeadletterMessagesAvailablePeriodicNotifications event.
:param namespace_name: The namespace name of the Microsoft.ServiceBus resource.
:type namespace_name: str
:param request_uri: The endpoint of the Microsoft.ServiceBus resource.
:type request_uri: str
:param entity_type: The entity type of the Microsoft.ServiceBus resource. Could be one of
'queue' or 'subscriber'.
:type entity_type: str
:param queue_name: The name of the Microsoft.ServiceBus queue. If the entity type is
'subscriber', this value will be null.
:type queue_name: str
:param topic_name: The name of the Microsoft.ServiceBus topic. If the entity type is
'queue', this value will be null.
:type topic_name: str
:param subscription_name: The name of the Microsoft.ServiceBus topic's subscription. If the
entity type is 'queue', this value will be null.
:type subscription_name: str
"""
_attribute_map = {
'namespace_name': {'key': 'namespaceName', 'type': 'str'},
'request_uri': {'key': 'requestUri', 'type': 'str'},
'entity_type': {'key': 'entityType', 'type': 'str'},
'queue_name': {'key': 'queueName', 'type': 'str'},
'topic_name': {'key': 'topicName', 'type': 'str'},
'subscription_name': {'key': 'subscriptionName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ServiceBusDeadletterMessagesAvailablePeriodicNotificationsEventData, self).__init__(**kwargs)
self.namespace_name = kwargs.get('namespace_name', None)
self.request_uri = kwargs.get('request_uri', None)
self.entity_type = kwargs.get('entity_type', None)
self.queue_name = kwargs.get('queue_name', None)
self.topic_name = kwargs.get('topic_name', None)
self.subscription_name = kwargs.get('subscription_name', None)
class ServiceBusDeadletterMessagesAvailableWithNoListenersEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.ServiceBus.DeadletterMessagesAvailableWithNoListenersEvent event.
:param namespace_name: The namespace name of the Microsoft.ServiceBus resource.
:type namespace_name: str
:param request_uri: The endpoint of the Microsoft.ServiceBus resource.
:type request_uri: str
:param entity_type: The entity type of the Microsoft.ServiceBus resource. Could be one of
'queue' or 'subscriber'.
:type entity_type: str
:param queue_name: The name of the Microsoft.ServiceBus queue. If the entity type is
'subscriber', this value will be null.
:type queue_name: str
:param topic_name: The name of the Microsoft.ServiceBus topic. If the entity type is
'queue', this value will be null.
:type topic_name: str
:param subscription_name: The name of the Microsoft.ServiceBus topic's subscription. If the
entity type is 'queue', this value will be null.
:type subscription_name: str
"""
_attribute_map = {
'namespace_name': {'key': 'namespaceName', 'type': 'str'},
'request_uri': {'key': 'requestUri', 'type': 'str'},
'entity_type': {'key': 'entityType', 'type': 'str'},
'queue_name': {'key': 'queueName', 'type': 'str'},
'topic_name': {'key': 'topicName', 'type': 'str'},
'subscription_name': {'key': 'subscriptionName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ServiceBusDeadletterMessagesAvailableWithNoListenersEventData, self).__init__(**kwargs)
self.namespace_name = kwargs.get('namespace_name', None)
self.request_uri = kwargs.get('request_uri', None)
self.entity_type = kwargs.get('entity_type', None)
self.queue_name = kwargs.get('queue_name', None)
self.topic_name = kwargs.get('topic_name', None)
self.subscription_name = kwargs.get('subscription_name', None)
class SignalRServiceClientConnectionConnectedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.SignalRService.ClientConnectionConnected event.
:param timestamp: The time at which the event occurred.
:type timestamp: ~datetime.datetime
:param hub_name: The hub of the connected client connection.
:type hub_name: str
:param connection_id: The connection Id of the connected client connection.
:type connection_id: str
:param user_id: The user Id of the connected client connection.
:type user_id: str
"""
_attribute_map = {
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'hub_name': {'key': 'hubName', 'type': 'str'},
'connection_id': {'key': 'connectionId', 'type': 'str'},
'user_id': {'key': 'userId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(SignalRServiceClientConnectionConnectedEventData, self).__init__(**kwargs)
self.timestamp = kwargs.get('timestamp', None)
self.hub_name = kwargs.get('hub_name', None)
self.connection_id = kwargs.get('connection_id', None)
self.user_id = kwargs.get('user_id', None)
class SignalRServiceClientConnectionDisconnectedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.SignalRService.ClientConnectionDisconnected event.
:param timestamp: The time at which the event occurred.
:type timestamp: ~datetime.datetime
:param hub_name: The hub of the disconnected client connection.
:type hub_name: str
:param connection_id: The connection Id of the disconnected client connection.
:type connection_id: str
:param user_id: The user Id of the disconnected client connection.
:type user_id: str
:param error_message: The error message describing why the client connection disconnected.
:type error_message: str
"""
_attribute_map = {
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'hub_name': {'key': 'hubName', 'type': 'str'},
'connection_id': {'key': 'connectionId', 'type': 'str'},
'user_id': {'key': 'userId', 'type': 'str'},
'error_message': {'key': 'errorMessage', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(SignalRServiceClientConnectionDisconnectedEventData, self).__init__(**kwargs)
self.timestamp = kwargs.get('timestamp', None)
self.hub_name = kwargs.get('hub_name', None)
self.connection_id = kwargs.get('connection_id', None)
self.user_id = kwargs.get('user_id', None)
self.error_message = kwargs.get('error_message', None)
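# Usage sketch (editorial addition, not generated code): the disconnected
# event carries the same fields as the connected event plus error_message.
# The payload below is illustrative only.
def _example_signalr_disconnect_reason():
    event = SignalRServiceClientConnectionDisconnectedEventData.deserialize({
        'timestamp': '2020-12-09T00:00:00Z',
        'hubName': 'chat',
        'connectionId': 'abc123',
        'userId': 'user-1',
        'errorMessage': 'Connection closed by the client.',
    })
    return event.error_message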
class StorageAsyncOperationInitiatedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Storage.AsyncOperationInitiated event.
:param api: The name of the API/operation that triggered this event.
:type api: str
:param client_request_id: A request id provided by the client of the storage API operation that
triggered this event.
:type client_request_id: str
:param request_id: The request id generated by the Storage service for the storage API
operation that triggered this event.
:type request_id: str
:param content_type: The content type of the blob. This is the same as what would be returned
in the Content-Type header from the blob.
:type content_type: str
:param content_length: The size of the blob in bytes. This is the same as what would be
returned in the Content-Length header from the blob.
:type content_length: long
:param blob_type: The type of blob.
:type blob_type: str
:param url: The path to the blob.
:type url: str
:param sequencer: An opaque string value representing the logical sequence of events for any
particular blob name. Users can use standard string comparison to understand the relative
sequence of two events on the same blob name.
:type sequencer: str
:param identity: The identity of the requester that triggered this event.
:type identity: str
:param storage_diagnostics: For service use only. Diagnostic data occasionally included by the
Azure Storage service. This property should be ignored by event consumers.
:type storage_diagnostics: object
"""
_attribute_map = {
'api': {'key': 'api', 'type': 'str'},
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'content_type': {'key': 'contentType', 'type': 'str'},
'content_length': {'key': 'contentLength', 'type': 'long'},
'blob_type': {'key': 'blobType', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'sequencer': {'key': 'sequencer', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'str'},
'storage_diagnostics': {'key': 'storageDiagnostics', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
super(StorageAsyncOperationInitiatedEventData, self).__init__(**kwargs)
self.api = kwargs.get('api', None)
self.client_request_id = kwargs.get('client_request_id', None)
self.request_id = kwargs.get('request_id', None)
self.content_type = kwargs.get('content_type', None)
self.content_length = kwargs.get('content_length', None)
self.blob_type = kwargs.get('blob_type', None)
self.url = kwargs.get('url', None)
self.sequencer = kwargs.get('sequencer', None)
self.identity = kwargs.get('identity', None)
self.storage_diagnostics = kwargs.get('storage_diagnostics', None)
class StorageBlobCreatedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Storage.BlobCreated event.
:param api: The name of the API/operation that triggered this event.
:type api: str
:param client_request_id: A request id provided by the client of the storage API operation that
triggered this event.
:type client_request_id: str
:param request_id: The request id generated by the Storage service for the storage API
operation that triggered this event.
:type request_id: str
:param e_tag: The etag of the blob at the time this event was triggered.
:type e_tag: str
:param content_type: The content type of the blob. This is the same as what would be returned
in the Content-Type header from the blob.
:type content_type: str
:param content_length: The size of the blob in bytes. This is the same as what would be
returned in the Content-Length header from the blob.
:type content_length: long
:param content_offset: The offset of the blob in bytes.
:type content_offset: long
:param blob_type: The type of blob.
:type blob_type: str
:param url: The path to the blob.
:type url: str
:param sequencer: An opaque string value representing the logical sequence of events for any
particular blob name. Users can use standard string comparison to understand the relative
sequence of two events on the same blob name.
:type sequencer: str
:param identity: The identity of the requester that triggered this event.
:type identity: str
:param storage_diagnostics: For service use only. Diagnostic data occasionally included by the
Azure Storage service. This property should be ignored by event consumers.
:type storage_diagnostics: object
"""
_attribute_map = {
'api': {'key': 'api', 'type': 'str'},
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'e_tag': {'key': 'eTag', 'type': 'str'},
'content_type': {'key': 'contentType', 'type': 'str'},
'content_length': {'key': 'contentLength', 'type': 'long'},
'content_offset': {'key': 'contentOffset', 'type': 'long'},
'blob_type': {'key': 'blobType', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'sequencer': {'key': 'sequencer', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'str'},
'storage_diagnostics': {'key': 'storageDiagnostics', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
super(StorageBlobCreatedEventData, self).__init__(**kwargs)
self.api = kwargs.get('api', None)
self.client_request_id = kwargs.get('client_request_id', None)
self.request_id = kwargs.get('request_id', None)
self.e_tag = kwargs.get('e_tag', None)
self.content_type = kwargs.get('content_type', None)
self.content_length = kwargs.get('content_length', None)
self.content_offset = kwargs.get('content_offset', None)
self.blob_type = kwargs.get('blob_type', None)
self.url = kwargs.get('url', None)
self.sequencer = kwargs.get('sequencer', None)
self.identity = kwargs.get('identity', None)
self.storage_diagnostics = kwargs.get('storage_diagnostics', None)
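# Usage sketch (editorial addition, not generated code): a hypothetical
# BlobCreated payload, shaped after the _attribute_map above, deserialized
# into the model.
def _example_storage_blob_created():
    event = StorageBlobCreatedEventData.deserialize({
        'api': 'PutBlob',
        'clientRequestId': '00000000-0000-0000-0000-000000000000',
        'requestId': '11111111-1111-1111-1111-111111111111',
        'eTag': '0x8D4BCC2E4835CD0',
        'contentType': 'application/octet-stream',
        'contentLength': 524288,
        'blobType': 'BlockBlob',
        'url': 'https://myaccount.blob.core.windows.net/my-container/my-blob',
        'sequencer': '00000000000004420000000000028963',
    })
    return event.url, event.content_length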
class StorageBlobDeletedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Storage.BlobDeleted event.
:param api: The name of the API/operation that triggered this event.
:type api: str
:param client_request_id: A request id provided by the client of the storage API operation that
triggered this event.
:type client_request_id: str
:param request_id: The request id generated by the Storage service for the storage API
operation that triggered this event.
:type request_id: str
:param content_type: The content type of the blob. This is the same as what would be returned
in the Content-Type header from the blob.
:type content_type: str
:param blob_type: The type of blob.
:type blob_type: str
:param url: The path to the blob.
:type url: str
:param sequencer: An opaque string value representing the logical sequence of events for any
particular blob name. Users can use standard string comparison to understand the relative
sequence of two events on the same blob name.
:type sequencer: str
:param identity: The identity of the requester that triggered this event.
:type identity: str
:param storage_diagnostics: For service use only. Diagnostic data occasionally included by the
Azure Storage service. This property should be ignored by event consumers.
:type storage_diagnostics: object
"""
_attribute_map = {
'api': {'key': 'api', 'type': 'str'},
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'content_type': {'key': 'contentType', 'type': 'str'},
'blob_type': {'key': 'blobType', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'sequencer': {'key': 'sequencer', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'str'},
'storage_diagnostics': {'key': 'storageDiagnostics', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
super(StorageBlobDeletedEventData, self).__init__(**kwargs)
self.api = kwargs.get('api', None)
self.client_request_id = kwargs.get('client_request_id', None)
self.request_id = kwargs.get('request_id', None)
self.content_type = kwargs.get('content_type', None)
self.blob_type = kwargs.get('blob_type', None)
self.url = kwargs.get('url', None)
self.sequencer = kwargs.get('sequencer', None)
self.identity = kwargs.get('identity', None)
self.storage_diagnostics = kwargs.get('storage_diagnostics', None)
class StorageBlobRenamedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Storage.BlobRenamed event.
:param api: The name of the API/operation that triggered this event.
:type api: str
:param client_request_id: A request id provided by the client of the storage API operation that
triggered this event.
:type client_request_id: str
:param request_id: The request id generated by the storage service for the storage API
operation that triggered this event.
:type request_id: str
:param source_url: The path to the blob that was renamed.
:type source_url: str
:param destination_url: The new path to the blob after the rename operation.
:type destination_url: str
:param sequencer: An opaque string value representing the logical sequence of events for any
particular blob name. Users can use standard string comparison to understand the relative
sequence of two events on the same blob name.
:type sequencer: str
:param identity: The identity of the requester that triggered this event.
:type identity: str
:param storage_diagnostics: For service use only. Diagnostic data occasionally included by the
Azure Storage service. This property should be ignored by event consumers.
:type storage_diagnostics: object
"""
_attribute_map = {
'api': {'key': 'api', 'type': 'str'},
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'source_url': {'key': 'sourceUrl', 'type': 'str'},
'destination_url': {'key': 'destinationUrl', 'type': 'str'},
'sequencer': {'key': 'sequencer', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'str'},
'storage_diagnostics': {'key': 'storageDiagnostics', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
super(StorageBlobRenamedEventData, self).__init__(**kwargs)
self.api = kwargs.get('api', None)
self.client_request_id = kwargs.get('client_request_id', None)
self.request_id = kwargs.get('request_id', None)
self.source_url = kwargs.get('source_url', None)
self.destination_url = kwargs.get('destination_url', None)
self.sequencer = kwargs.get('sequencer', None)
self.identity = kwargs.get('identity', None)
self.storage_diagnostics = kwargs.get('storage_diagnostics', None)
class StorageBlobTierChangedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Storage.BlobTierChanged event.
:param api: The name of the API/operation that triggered this event.
:type api: str
:param client_request_id: A request id provided by the client of the storage API operation that
triggered this event.
:type client_request_id: str
:param request_id: The request id generated by the Storage service for the storage API
operation that triggered this event.
:type request_id: str
:param content_type: The content type of the blob. This is the same as what would be returned
in the Content-Type header from the blob.
:type content_type: str
:param content_length: The size of the blob in bytes. This is the same as what would be
returned in the Content-Length header from the blob.
:type content_length: long
:param blob_type: The type of blob.
:type blob_type: str
:param url: The path to the blob.
:type url: str
:param sequencer: An opaque string value representing the logical sequence of events for any
particular blob name. Users can use standard string comparison to understand the relative
sequence of two events on the same blob name.
:type sequencer: str
:param identity: The identity of the requester that triggered this event.
:type identity: str
:param storage_diagnostics: For service use only. Diagnostic data occasionally included by the
Azure Storage service. This property should be ignored by event consumers.
:type storage_diagnostics: object
"""
_attribute_map = {
'api': {'key': 'api', 'type': 'str'},
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'content_type': {'key': 'contentType', 'type': 'str'},
'content_length': {'key': 'contentLength', 'type': 'long'},
'blob_type': {'key': 'blobType', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'sequencer': {'key': 'sequencer', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'str'},
'storage_diagnostics': {'key': 'storageDiagnostics', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
super(StorageBlobTierChangedEventData, self).__init__(**kwargs)
self.api = kwargs.get('api', None)
self.client_request_id = kwargs.get('client_request_id', None)
self.request_id = kwargs.get('request_id', None)
self.content_type = kwargs.get('content_type', None)
self.content_length = kwargs.get('content_length', None)
self.blob_type = kwargs.get('blob_type', None)
self.url = kwargs.get('url', None)
self.sequencer = kwargs.get('sequencer', None)
self.identity = kwargs.get('identity', None)
self.storage_diagnostics = kwargs.get('storage_diagnostics', None)
class StorageDirectoryCreatedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Storage.DirectoryCreated event.
:param api: The name of the API/operation that triggered this event.
:type api: str
:param client_request_id: A request id provided by the client of the storage API operation that
triggered this event.
:type client_request_id: str
:param request_id: The request id generated by the storage service for the storage API
operation that triggered this event.
:type request_id: str
:param e_tag: The etag of the directory at the time this event was triggered.
:type e_tag: str
:param url: The path to the directory.
:type url: str
:param sequencer: An opaque string value representing the logical sequence of events for any
particular directory name. Users can use standard string comparison to understand the relative
sequence of two events on the same directory name.
:type sequencer: str
:param identity: The identity of the requester that triggered this event.
:type identity: str
:param storage_diagnostics: For service use only. Diagnostic data occasionally included by the
Azure Storage service. This property should be ignored by event consumers.
:type storage_diagnostics: object
"""
_attribute_map = {
'api': {'key': 'api', 'type': 'str'},
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'e_tag': {'key': 'eTag', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'sequencer': {'key': 'sequencer', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'str'},
'storage_diagnostics': {'key': 'storageDiagnostics', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
super(StorageDirectoryCreatedEventData, self).__init__(**kwargs)
self.api = kwargs.get('api', None)
self.client_request_id = kwargs.get('client_request_id', None)
self.request_id = kwargs.get('request_id', None)
self.e_tag = kwargs.get('e_tag', None)
self.url = kwargs.get('url', None)
self.sequencer = kwargs.get('sequencer', None)
self.identity = kwargs.get('identity', None)
self.storage_diagnostics = kwargs.get('storage_diagnostics', None)
class StorageDirectoryDeletedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Storage.DirectoryDeleted event.
:param api: The name of the API/operation that triggered this event.
:type api: str
:param client_request_id: A request id provided by the client of the storage API operation that
triggered this event.
:type client_request_id: str
:param request_id: The request id generated by the storage service for the storage API
operation that triggered this event.
:type request_id: str
:param url: The path to the deleted directory.
:type url: str
    :param recursive: Whether this event was triggered by a recursive delete operation.
:type recursive: bool
:param sequencer: An opaque string value representing the logical sequence of events for any
particular directory name. Users can use standard string comparison to understand the relative
sequence of two events on the same directory name.
:type sequencer: str
:param identity: The identity of the requester that triggered this event.
:type identity: str
:param storage_diagnostics: For service use only. Diagnostic data occasionally included by the
Azure Storage service. This property should be ignored by event consumers.
:type storage_diagnostics: object
"""
_attribute_map = {
'api': {'key': 'api', 'type': 'str'},
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'recursive': {'key': 'recursive', 'type': 'bool'},
'sequencer': {'key': 'sequencer', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'str'},
'storage_diagnostics': {'key': 'storageDiagnostics', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
super(StorageDirectoryDeletedEventData, self).__init__(**kwargs)
self.api = kwargs.get('api', None)
self.client_request_id = kwargs.get('client_request_id', None)
self.request_id = kwargs.get('request_id', None)
self.url = kwargs.get('url', None)
self.recursive = kwargs.get('recursive', None)
self.sequencer = kwargs.get('sequencer', None)
self.identity = kwargs.get('identity', None)
self.storage_diagnostics = kwargs.get('storage_diagnostics', None)
class StorageDirectoryRenamedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Storage.DirectoryRenamed event.
:param api: The name of the API/operation that triggered this event.
:type api: str
:param client_request_id: A request id provided by the client of the storage API operation that
triggered this event.
:type client_request_id: str
:param request_id: The request id generated by the storage service for the storage API
operation that triggered this event.
:type request_id: str
:param source_url: The path to the directory that was renamed.
:type source_url: str
:param destination_url: The new path to the directory after the rename operation.
:type destination_url: str
:param sequencer: An opaque string value representing the logical sequence of events for any
particular directory name. Users can use standard string comparison to understand the relative
sequence of two events on the same directory name.
:type sequencer: str
:param identity: The identity of the requester that triggered this event.
:type identity: str
:param storage_diagnostics: For service use only. Diagnostic data occasionally included by the
Azure Storage service. This property should be ignored by event consumers.
:type storage_diagnostics: object
"""
_attribute_map = {
'api': {'key': 'api', 'type': 'str'},
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'source_url': {'key': 'sourceUrl', 'type': 'str'},
'destination_url': {'key': 'destinationUrl', 'type': 'str'},
'sequencer': {'key': 'sequencer', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'str'},
'storage_diagnostics': {'key': 'storageDiagnostics', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
super(StorageDirectoryRenamedEventData, self).__init__(**kwargs)
self.api = kwargs.get('api', None)
self.client_request_id = kwargs.get('client_request_id', None)
self.request_id = kwargs.get('request_id', None)
self.source_url = kwargs.get('source_url', None)
self.destination_url = kwargs.get('destination_url', None)
self.sequencer = kwargs.get('sequencer', None)
self.identity = kwargs.get('identity', None)
self.storage_diagnostics = kwargs.get('storage_diagnostics', None)
class StorageLifecyclePolicyActionSummaryDetail(msrest.serialization.Model):
"""Execution statistics of a specific policy action in a Blob Management cycle.
:param total_objects_count: Total number of objects to be acted on by this action.
:type total_objects_count: long
:param success_count: Number of success operations of this action.
:type success_count: long
:param error_list: Error messages of this action if any.
:type error_list: str
"""
_attribute_map = {
'total_objects_count': {'key': 'totalObjectsCount', 'type': 'long'},
'success_count': {'key': 'successCount', 'type': 'long'},
'error_list': {'key': 'errorList', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(StorageLifecyclePolicyActionSummaryDetail, self).__init__(**kwargs)
self.total_objects_count = kwargs.get('total_objects_count', None)
self.success_count = kwargs.get('success_count', None)
self.error_list = kwargs.get('error_list', None)
class StorageLifecyclePolicyCompletedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Storage.LifecyclePolicyCompleted event.
:param schedule_time: The time the policy task was scheduled.
:type schedule_time: str
:param delete_summary: Execution statistics of a specific policy action in a Blob Management
cycle.
:type delete_summary:
~event_grid_publisher_client.models.StorageLifecyclePolicyActionSummaryDetail
:param tier_to_cool_summary: Execution statistics of a specific policy action in a Blob
Management cycle.
:type tier_to_cool_summary:
~event_grid_publisher_client.models.StorageLifecyclePolicyActionSummaryDetail
:param tier_to_archive_summary: Execution statistics of a specific policy action in a Blob
Management cycle.
:type tier_to_archive_summary:
~event_grid_publisher_client.models.StorageLifecyclePolicyActionSummaryDetail
"""
_attribute_map = {
'schedule_time': {'key': 'scheduleTime', 'type': 'str'},
'delete_summary': {'key': 'deleteSummary', 'type': 'StorageLifecyclePolicyActionSummaryDetail'},
'tier_to_cool_summary': {'key': 'tierToCoolSummary', 'type': 'StorageLifecyclePolicyActionSummaryDetail'},
'tier_to_archive_summary': {'key': 'tierToArchiveSummary', 'type': 'StorageLifecyclePolicyActionSummaryDetail'},
}
def __init__(
self,
**kwargs
):
super(StorageLifecyclePolicyCompletedEventData, self).__init__(**kwargs)
self.schedule_time = kwargs.get('schedule_time', None)
self.delete_summary = kwargs.get('delete_summary', None)
self.tier_to_cool_summary = kwargs.get('tier_to_cool_summary', None)
self.tier_to_archive_summary = kwargs.get('tier_to_archive_summary', None)
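# Illustrative sketch (not part of the generated code): building a
# lifecycle-policy completed payload by hand, e.g. for unit tests. Every
# value below is invented for illustration only.
def _example_lifecycle_policy_event():
    return StorageLifecyclePolicyCompletedEventData(
        schedule_time='2021-05-26T00:00:00Z',
        delete_summary=StorageLifecyclePolicyActionSummaryDetail(
            total_objects_count=100,
            success_count=99,
            error_list='one blob was leased and could not be deleted'))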
class SubscriptionDeletedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.EventGrid.SubscriptionDeletedEvent event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar event_subscription_id: The Azure resource ID of the deleted event subscription.
:vartype event_subscription_id: str
"""
_validation = {
'event_subscription_id': {'readonly': True},
}
_attribute_map = {
'event_subscription_id': {'key': 'eventSubscriptionId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(SubscriptionDeletedEventData, self).__init__(**kwargs)
self.event_subscription_id = None
class SubscriptionValidationEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.EventGrid.SubscriptionValidationEvent event.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar validation_code: The validation code sent by Azure Event Grid to validate an event
subscription. To complete the validation handshake, the subscriber must either respond with
this validation code as part of the validation response, or perform a GET request on the
validationUrl (available starting version 2018-05-01-preview).
:vartype validation_code: str
:ivar validation_url: The validation URL sent by Azure Event Grid (available starting version
2018-05-01-preview). To complete the validation handshake, the subscriber must either respond
with the validationCode as part of the validation response, or perform a GET request on the
validationUrl (available starting version 2018-05-01-preview).
:vartype validation_url: str
"""
_validation = {
'validation_code': {'readonly': True},
'validation_url': {'readonly': True},
}
_attribute_map = {
'validation_code': {'key': 'validationCode', 'type': 'str'},
'validation_url': {'key': 'validationUrl', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(SubscriptionValidationEventData, self).__init__(**kwargs)
self.validation_code = None
self.validation_url = None
class SubscriptionValidationResponse(msrest.serialization.Model):
"""To complete an event subscription validation handshake, a subscriber can use either the validationCode or the validationUrl received in a SubscriptionValidationEvent. When the validationCode is used, the SubscriptionValidationResponse can be used to build the response.
:param validation_response: The validation response sent by the subscriber to Azure Event Grid
to complete the validation of an event subscription.
:type validation_response: str
"""
_attribute_map = {
'validation_response': {'key': 'validationResponse', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(SubscriptionValidationResponse, self).__init__(**kwargs)
self.validation_response = kwargs.get('validation_response', None)
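# Illustrative sketch (not part of the generated code): completing the
# validation handshake described above. The handler below is hypothetical;
# the only real requirement is echoing back the received validation code.
def _example_validation_handshake(event_data):
    # event_data is the SubscriptionValidationEventData delivered to the
    # webhook endpoint; returning its code proves ownership of the endpoint.
    return SubscriptionValidationResponse(
        validation_response=event_data.validation_code)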
class WebAppServicePlanUpdatedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Web.AppServicePlanUpdated event.
:param app_service_plan_event_type_detail: Detail of action on the app service plan.
:type app_service_plan_event_type_detail:
~event_grid_publisher_client.models.AppServicePlanEventTypeDetail
    :param sku: SKU of the app service plan.
:type sku: ~event_grid_publisher_client.models.WebAppServicePlanUpdatedEventDataSku
:param name: name of the app service plan that had this event.
:type name: str
:param client_request_id: The client request id generated by the app service for the app
service plan API operation that triggered this event.
:type client_request_id: str
:param correlation_request_id: The correlation request id generated by the app service for the
app service plan API operation that triggered this event.
:type correlation_request_id: str
:param request_id: The request id generated by the app service for the app service plan API
operation that triggered this event.
:type request_id: str
:param address: HTTP request URL of this operation.
:type address: str
:param verb: HTTP verb of this operation.
:type verb: str
"""
_attribute_map = {
'app_service_plan_event_type_detail': {'key': 'appServicePlanEventTypeDetail', 'type': 'AppServicePlanEventTypeDetail'},
'sku': {'key': 'sku', 'type': 'WebAppServicePlanUpdatedEventDataSku'},
'name': {'key': 'name', 'type': 'str'},
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'address': {'key': 'address', 'type': 'str'},
'verb': {'key': 'verb', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(WebAppServicePlanUpdatedEventData, self).__init__(**kwargs)
self.app_service_plan_event_type_detail = kwargs.get('app_service_plan_event_type_detail', None)
self.sku = kwargs.get('sku', None)
self.name = kwargs.get('name', None)
self.client_request_id = kwargs.get('client_request_id', None)
self.correlation_request_id = kwargs.get('correlation_request_id', None)
self.request_id = kwargs.get('request_id', None)
self.address = kwargs.get('address', None)
self.verb = kwargs.get('verb', None)
class WebAppServicePlanUpdatedEventDataSku(msrest.serialization.Model):
"""sku of app service plan.
:param name: name of app service plan sku.
:type name: str
:param tier: tier of app service plan sku.
:type tier: str
:param size: size of app service plan sku.
:type size: str
:param family: family of app service plan sku.
:type family: str
:param capacity: capacity of app service plan sku.
:type capacity: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'tier': {'key': 'Tier', 'type': 'str'},
'size': {'key': 'Size', 'type': 'str'},
'family': {'key': 'Family', 'type': 'str'},
'capacity': {'key': 'Capacity', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(WebAppServicePlanUpdatedEventDataSku, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.tier = kwargs.get('tier', None)
self.size = kwargs.get('size', None)
self.family = kwargs.get('family', None)
self.capacity = kwargs.get('capacity', None)
class WebAppUpdatedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Web.AppUpdated event.
:param app_event_type_detail: Detail of action on the app.
:type app_event_type_detail: ~event_grid_publisher_client.models.AppEventTypeDetail
:param name: name of the web site that had this event.
:type name: str
:param client_request_id: The client request id generated by the app service for the site API
operation that triggered this event.
:type client_request_id: str
:param correlation_request_id: The correlation request id generated by the app service for the
site API operation that triggered this event.
:type correlation_request_id: str
:param request_id: The request id generated by the app service for the site API operation that
triggered this event.
:type request_id: str
:param address: HTTP request URL of this operation.
:type address: str
:param verb: HTTP verb of this operation.
:type verb: str
"""
_attribute_map = {
'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
'name': {'key': 'name', 'type': 'str'},
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'address': {'key': 'address', 'type': 'str'},
'verb': {'key': 'verb', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(WebAppUpdatedEventData, self).__init__(**kwargs)
self.app_event_type_detail = kwargs.get('app_event_type_detail', None)
self.name = kwargs.get('name', None)
self.client_request_id = kwargs.get('client_request_id', None)
self.correlation_request_id = kwargs.get('correlation_request_id', None)
self.request_id = kwargs.get('request_id', None)
self.address = kwargs.get('address', None)
self.verb = kwargs.get('verb', None)
class WebBackupOperationCompletedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Web.BackupOperationCompleted event.
:param app_event_type_detail: Detail of action on the app.
:type app_event_type_detail: ~event_grid_publisher_client.models.AppEventTypeDetail
:param name: name of the web site that had this event.
:type name: str
:param client_request_id: The client request id generated by the app service for the site API
operation that triggered this event.
:type client_request_id: str
:param correlation_request_id: The correlation request id generated by the app service for the
site API operation that triggered this event.
:type correlation_request_id: str
:param request_id: The request id generated by the app service for the site API operation that
triggered this event.
:type request_id: str
:param address: HTTP request URL of this operation.
:type address: str
:param verb: HTTP verb of this operation.
:type verb: str
"""
_attribute_map = {
'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
'name': {'key': 'name', 'type': 'str'},
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'address': {'key': 'address', 'type': 'str'},
'verb': {'key': 'verb', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(WebBackupOperationCompletedEventData, self).__init__(**kwargs)
self.app_event_type_detail = kwargs.get('app_event_type_detail', None)
self.name = kwargs.get('name', None)
self.client_request_id = kwargs.get('client_request_id', None)
self.correlation_request_id = kwargs.get('correlation_request_id', None)
self.request_id = kwargs.get('request_id', None)
self.address = kwargs.get('address', None)
self.verb = kwargs.get('verb', None)
class WebBackupOperationFailedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Web.BackupOperationFailed event.
:param app_event_type_detail: Detail of action on the app.
:type app_event_type_detail: ~event_grid_publisher_client.models.AppEventTypeDetail
:param name: name of the web site that had this event.
:type name: str
:param client_request_id: The client request id generated by the app service for the site API
operation that triggered this event.
:type client_request_id: str
:param correlation_request_id: The correlation request id generated by the app service for the
site API operation that triggered this event.
:type correlation_request_id: str
:param request_id: The request id generated by the app service for the site API operation that
triggered this event.
:type request_id: str
:param address: HTTP request URL of this operation.
:type address: str
:param verb: HTTP verb of this operation.
:type verb: str
"""
_attribute_map = {
'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
'name': {'key': 'name', 'type': 'str'},
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'address': {'key': 'address', 'type': 'str'},
'verb': {'key': 'verb', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(WebBackupOperationFailedEventData, self).__init__(**kwargs)
self.app_event_type_detail = kwargs.get('app_event_type_detail', None)
self.name = kwargs.get('name', None)
self.client_request_id = kwargs.get('client_request_id', None)
self.correlation_request_id = kwargs.get('correlation_request_id', None)
self.request_id = kwargs.get('request_id', None)
self.address = kwargs.get('address', None)
self.verb = kwargs.get('verb', None)
class WebBackupOperationStartedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Web.BackupOperationStarted event.
:param app_event_type_detail: Detail of action on the app.
:type app_event_type_detail: ~event_grid_publisher_client.models.AppEventTypeDetail
:param name: name of the web site that had this event.
:type name: str
:param client_request_id: The client request id generated by the app service for the site API
operation that triggered this event.
:type client_request_id: str
:param correlation_request_id: The correlation request id generated by the app service for the
site API operation that triggered this event.
:type correlation_request_id: str
:param request_id: The request id generated by the app service for the site API operation that
triggered this event.
:type request_id: str
:param address: HTTP request URL of this operation.
:type address: str
:param verb: HTTP verb of this operation.
:type verb: str
"""
_attribute_map = {
'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
'name': {'key': 'name', 'type': 'str'},
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'address': {'key': 'address', 'type': 'str'},
'verb': {'key': 'verb', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(WebBackupOperationStartedEventData, self).__init__(**kwargs)
self.app_event_type_detail = kwargs.get('app_event_type_detail', None)
self.name = kwargs.get('name', None)
self.client_request_id = kwargs.get('client_request_id', None)
self.correlation_request_id = kwargs.get('correlation_request_id', None)
self.request_id = kwargs.get('request_id', None)
self.address = kwargs.get('address', None)
self.verb = kwargs.get('verb', None)
class WebRestoreOperationCompletedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Web.RestoreOperationCompleted event.
:param app_event_type_detail: Detail of action on the app.
:type app_event_type_detail: ~event_grid_publisher_client.models.AppEventTypeDetail
:param name: name of the web site that had this event.
:type name: str
:param client_request_id: The client request id generated by the app service for the site API
operation that triggered this event.
:type client_request_id: str
:param correlation_request_id: The correlation request id generated by the app service for the
site API operation that triggered this event.
:type correlation_request_id: str
:param request_id: The request id generated by the app service for the site API operation that
triggered this event.
:type request_id: str
:param address: HTTP request URL of this operation.
:type address: str
:param verb: HTTP verb of this operation.
:type verb: str
"""
_attribute_map = {
'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
'name': {'key': 'name', 'type': 'str'},
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'address': {'key': 'address', 'type': 'str'},
'verb': {'key': 'verb', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(WebRestoreOperationCompletedEventData, self).__init__(**kwargs)
self.app_event_type_detail = kwargs.get('app_event_type_detail', None)
self.name = kwargs.get('name', None)
self.client_request_id = kwargs.get('client_request_id', None)
self.correlation_request_id = kwargs.get('correlation_request_id', None)
self.request_id = kwargs.get('request_id', None)
self.address = kwargs.get('address', None)
self.verb = kwargs.get('verb', None)
class WebRestoreOperationFailedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Web.RestoreOperationFailed event.
:param app_event_type_detail: Detail of action on the app.
:type app_event_type_detail: ~event_grid_publisher_client.models.AppEventTypeDetail
:param name: name of the web site that had this event.
:type name: str
:param client_request_id: The client request id generated by the app service for the site API
operation that triggered this event.
:type client_request_id: str
:param correlation_request_id: The correlation request id generated by the app service for the
site API operation that triggered this event.
:type correlation_request_id: str
:param request_id: The request id generated by the app service for the site API operation that
triggered this event.
:type request_id: str
:param address: HTTP request URL of this operation.
:type address: str
:param verb: HTTP verb of this operation.
:type verb: str
"""
_attribute_map = {
'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
'name': {'key': 'name', 'type': 'str'},
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'address': {'key': 'address', 'type': 'str'},
'verb': {'key': 'verb', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(WebRestoreOperationFailedEventData, self).__init__(**kwargs)
self.app_event_type_detail = kwargs.get('app_event_type_detail', None)
self.name = kwargs.get('name', None)
self.client_request_id = kwargs.get('client_request_id', None)
self.correlation_request_id = kwargs.get('correlation_request_id', None)
self.request_id = kwargs.get('request_id', None)
self.address = kwargs.get('address', None)
self.verb = kwargs.get('verb', None)
class WebRestoreOperationStartedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Web.RestoreOperationStarted event.
:param app_event_type_detail: Detail of action on the app.
:type app_event_type_detail: ~event_grid_publisher_client.models.AppEventTypeDetail
:param name: name of the web site that had this event.
:type name: str
:param client_request_id: The client request id generated by the app service for the site API
operation that triggered this event.
:type client_request_id: str
:param correlation_request_id: The correlation request id generated by the app service for the
site API operation that triggered this event.
:type correlation_request_id: str
:param request_id: The request id generated by the app service for the site API operation that
triggered this event.
:type request_id: str
:param address: HTTP request URL of this operation.
:type address: str
:param verb: HTTP verb of this operation.
:type verb: str
"""
_attribute_map = {
'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
'name': {'key': 'name', 'type': 'str'},
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'address': {'key': 'address', 'type': 'str'},
'verb': {'key': 'verb', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(WebRestoreOperationStartedEventData, self).__init__(**kwargs)
self.app_event_type_detail = kwargs.get('app_event_type_detail', None)
self.name = kwargs.get('name', None)
self.client_request_id = kwargs.get('client_request_id', None)
self.correlation_request_id = kwargs.get('correlation_request_id', None)
self.request_id = kwargs.get('request_id', None)
self.address = kwargs.get('address', None)
self.verb = kwargs.get('verb', None)
class WebSlotSwapCompletedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Web.SlotSwapCompleted event.
:param app_event_type_detail: Detail of action on the app.
:type app_event_type_detail: ~event_grid_publisher_client.models.AppEventTypeDetail
:param name: name of the web site that had this event.
:type name: str
:param client_request_id: The client request id generated by the app service for the site API
operation that triggered this event.
:type client_request_id: str
:param correlation_request_id: The correlation request id generated by the app service for the
site API operation that triggered this event.
:type correlation_request_id: str
:param request_id: The request id generated by the app service for the site API operation that
triggered this event.
:type request_id: str
:param address: HTTP request URL of this operation.
:type address: str
:param verb: HTTP verb of this operation.
:type verb: str
"""
_attribute_map = {
'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
'name': {'key': 'name', 'type': 'str'},
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'address': {'key': 'address', 'type': 'str'},
'verb': {'key': 'verb', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(WebSlotSwapCompletedEventData, self).__init__(**kwargs)
self.app_event_type_detail = kwargs.get('app_event_type_detail', None)
self.name = kwargs.get('name', None)
self.client_request_id = kwargs.get('client_request_id', None)
self.correlation_request_id = kwargs.get('correlation_request_id', None)
self.request_id = kwargs.get('request_id', None)
self.address = kwargs.get('address', None)
self.verb = kwargs.get('verb', None)
class WebSlotSwapFailedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Web.SlotSwapFailed event.
:param app_event_type_detail: Detail of action on the app.
:type app_event_type_detail: ~event_grid_publisher_client.models.AppEventTypeDetail
:param name: name of the web site that had this event.
:type name: str
:param client_request_id: The client request id generated by the app service for the site API
operation that triggered this event.
:type client_request_id: str
:param correlation_request_id: The correlation request id generated by the app service for the
site API operation that triggered this event.
:type correlation_request_id: str
:param request_id: The request id generated by the app service for the site API operation that
triggered this event.
:type request_id: str
:param address: HTTP request URL of this operation.
:type address: str
:param verb: HTTP verb of this operation.
:type verb: str
"""
_attribute_map = {
'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
'name': {'key': 'name', 'type': 'str'},
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'address': {'key': 'address', 'type': 'str'},
'verb': {'key': 'verb', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(WebSlotSwapFailedEventData, self).__init__(**kwargs)
self.app_event_type_detail = kwargs.get('app_event_type_detail', None)
self.name = kwargs.get('name', None)
self.client_request_id = kwargs.get('client_request_id', None)
self.correlation_request_id = kwargs.get('correlation_request_id', None)
self.request_id = kwargs.get('request_id', None)
self.address = kwargs.get('address', None)
self.verb = kwargs.get('verb', None)
class WebSlotSwapStartedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Web.SlotSwapStarted event.
:param app_event_type_detail: Detail of action on the app.
:type app_event_type_detail: ~event_grid_publisher_client.models.AppEventTypeDetail
:param name: name of the web site that had this event.
:type name: str
:param client_request_id: The client request id generated by the app service for the site API
operation that triggered this event.
:type client_request_id: str
:param correlation_request_id: The correlation request id generated by the app service for the
site API operation that triggered this event.
:type correlation_request_id: str
:param request_id: The request id generated by the app service for the site API operation that
triggered this event.
:type request_id: str
:param address: HTTP request URL of this operation.
:type address: str
:param verb: HTTP verb of this operation.
:type verb: str
"""
_attribute_map = {
'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
'name': {'key': 'name', 'type': 'str'},
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'address': {'key': 'address', 'type': 'str'},
'verb': {'key': 'verb', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(WebSlotSwapStartedEventData, self).__init__(**kwargs)
self.app_event_type_detail = kwargs.get('app_event_type_detail', None)
self.name = kwargs.get('name', None)
self.client_request_id = kwargs.get('client_request_id', None)
self.correlation_request_id = kwargs.get('correlation_request_id', None)
self.request_id = kwargs.get('request_id', None)
self.address = kwargs.get('address', None)
self.verb = kwargs.get('verb', None)
class WebSlotSwapWithPreviewCancelledEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Web.SlotSwapWithPreviewCancelled event.
:param app_event_type_detail: Detail of action on the app.
:type app_event_type_detail: ~event_grid_publisher_client.models.AppEventTypeDetail
:param name: name of the web site that had this event.
:type name: str
:param client_request_id: The client request id generated by the app service for the site API
operation that triggered this event.
:type client_request_id: str
:param correlation_request_id: The correlation request id generated by the app service for the
site API operation that triggered this event.
:type correlation_request_id: str
:param request_id: The request id generated by the app service for the site API operation that
triggered this event.
:type request_id: str
:param address: HTTP request URL of this operation.
:type address: str
:param verb: HTTP verb of this operation.
:type verb: str
"""
_attribute_map = {
'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
'name': {'key': 'name', 'type': 'str'},
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'address': {'key': 'address', 'type': 'str'},
'verb': {'key': 'verb', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(WebSlotSwapWithPreviewCancelledEventData, self).__init__(**kwargs)
self.app_event_type_detail = kwargs.get('app_event_type_detail', None)
self.name = kwargs.get('name', None)
self.client_request_id = kwargs.get('client_request_id', None)
self.correlation_request_id = kwargs.get('correlation_request_id', None)
self.request_id = kwargs.get('request_id', None)
self.address = kwargs.get('address', None)
self.verb = kwargs.get('verb', None)
class WebSlotSwapWithPreviewStartedEventData(msrest.serialization.Model):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Web.SlotSwapWithPreviewStarted event.
:param app_event_type_detail: Detail of action on the app.
:type app_event_type_detail: ~event_grid_publisher_client.models.AppEventTypeDetail
:param name: name of the web site that had this event.
:type name: str
:param client_request_id: The client request id generated by the app service for the site API
operation that triggered this event.
:type client_request_id: str
:param correlation_request_id: The correlation request id generated by the app service for the
site API operation that triggered this event.
:type correlation_request_id: str
:param request_id: The request id generated by the app service for the site API operation that
triggered this event.
:type request_id: str
:param address: HTTP request URL of this operation.
:type address: str
:param verb: HTTP verb of this operation.
:type verb: str
"""
_attribute_map = {
'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
'name': {'key': 'name', 'type': 'str'},
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'address': {'key': 'address', 'type': 'str'},
'verb': {'key': 'verb', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(WebSlotSwapWithPreviewStartedEventData, self).__init__(**kwargs)
self.app_event_type_detail = kwargs.get('app_event_type_detail', None)
self.name = kwargs.get('name', None)
self.client_request_id = kwargs.get('client_request_id', None)
self.correlation_request_id = kwargs.get('correlation_request_id', None)
self.request_id = kwargs.get('request_id', None)
self.address = kwargs.get('address', None)
self.verb = kwargs.get('verb', None) | _attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'error': {'key': 'error', 'type': 'MediaJobError'}, |
test_rt_valid_model_gaussian_mixture.py | """
@brief test log(time=16s)
"""
import unittest
from logging import getLogger
from pandas import DataFrame
from pyquickhelper.loghelper import fLOG
from pyquickhelper.pycode import ExtTestCase
from pyquickhelper.pandashelper import df2rst
from sklearn.exceptions import ConvergenceWarning
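# scikit-learn 0.22 moved the testing helpers into the private module
# sklearn.utils._testing; fall back to the old public path on older releases.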
try:
from sklearn.utils._testing import ignore_warnings
except ImportError:
from sklearn.utils.testing import ignore_warnings
from skl2onnx import __version__ as skl2onnx_version
from mlprodict.onnxrt.validate import enumerate_validated_operator_opsets, summary_report
from mlprodict.onnxrt.doc.doc_write_helper import split_columns_subsets
class TestRtValidateGaussianMixture(ExtTestCase):
@ignore_warnings(category=(UserWarning, ConvergenceWarning, RuntimeWarning))
def test_rt_GaussianMixture_python(self):
fLOG(__file__, self._testMethodName, OutputPrint=__name__ == "__main__")
logger = getLogger('skl2onnx')
logger.disabled = True
verbose = 1 if __name__ == "__main__" else 0
debug = False
buffer = []
def myprint(*args, **kwargs):
buffer.append(" ".join(map(str, args)))
rows = list(enumerate_validated_operator_opsets(
verbose, models={"GaussianMixture"}, opset_min=9,
opset_max=11, fLOG=myprint,
runtime='python', debug=debug,
filter_exp=lambda m, p: 'mix' in p))
self.assertGreater(len(rows), 1)
self.assertIn('skl_nop', rows[-1])
keys = set()
for row in rows:
keys.update(set(row))
self.assertIn('onx_size', keys)
piv = summary_report(DataFrame(rows))
opset = [c for c in piv.columns if 'opset' in c]
self.assertTrue('opset11' in opset or 'opset10' in opset)
self.assertGreater(len(buffer), 1 if debug else 0)
common, subsets = split_columns_subsets(piv)
try:
conv = df2rst(piv, split_col_common=common, # pylint: disable=E1123
split_col_subsets=subsets)
self.assertIn('| GaussianMixture |', conv)
        except TypeError as e:
            if "got an unexpected keyword argument 'split_col_common'" in str(e):
                # Older pyquickhelper versions do not support this argument.
                return
            raise
if __name__ == "__main__":
| unittest.main() |
|
stack.go | package log
import (
"fmt"
"github.com/andypangaribuan/vision-go/models"
"github.com/andypangaribuan/vision-go/vis"
"os"
"reflect"
"runtime"
)
/* ============================================
Created by andy pangaribuan on 2021/05/18
Copyright andypangaribuan. All rights reserved.
============================================ */
func (slf *logStruct) Stack(args ...interface{}) (stack *string) {
v := slf.BaseStack(2, args...)
stack = &v
return
}
func (*logStruct) BaseStack(skip int, args ...interface{}) (stack string) {
pc, filePath, lineNumber, _ := runtime.Caller(skip)
funcName := runtime.FuncForPC(pc).Name()
format := ":: %s \n:: %s:%d"
data := []interface{}{funcName, filePath, lineNumber}
for _, arg := range args {
switch v := arg.(type) {
case error:
format += "\n:: %v"
data = append(data, v)
}
}
if _, err := os.Stat(filePath); !os.IsNotExist(err) {
if codes := readFile(filePath, lineNumber); codes != "" {
format += "\n:: START CODE STACK\n"
format += "%v"
format += "\n:: END CODE STACK"
data = append(data, codes)
}
}
for _, v := range args {
format += "\n\n>_\n"
format += "%+v"
data = append(data, getLogValue(v))
}
format += "\n\n|=|"
stack = fmt.Sprintf(format, data...)
stack = vis.Util.RemoveInvalidChar(stack)
return
}
func getLogValue(obj interface{}) string | {
if obj == nil {
return "nil"
}
objRef := reflect.ValueOf(obj)
objKind := objRef.Kind()
if objKind == reflect.Ptr {
objRef = objRef.Elem()
objKind = objRef.Kind()
}
if objKind == reflect.Invalid {
return "nil"
}
obj = objRef.Interface()
value := ""
switch data := obj.(type) {
	case string:
		value = data
	case []byte:
		value = string(data)
	case error:
		value = codeStack(data) + "\n\n" + fmt.Sprintf("%+v", data)
case models.DbTxError:
if data.Msg != "" {
value = data.Msg
}
if data.Err != nil {
if value != "" {
value += "\n\n"
}
value += codeStack(data.Err) + "\n\n" + fmt.Sprintf("%+v", data.Err)
}
default:
if content, err := vis.Json.Encode(data); err == nil && content != "" {
value = content
}
if value == "" || value == "{}" {
value = fmt.Sprintf("%+v", data)
}
}
return value
} |
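// exampleStackUsage is an illustrative sketch, not part of the original
// file: it shows how the helpers above fit together. BaseStack never reads
// its receiver, so a zero-valued logStruct is enough for the demonstration.
func exampleStackUsage() string {
	var l logStruct
	// skip=1 makes runtime.Caller report this function as the call site; the
	// error shows up both in the ":: %v" header and as a dumped argument.
	return l.BaseStack(1, fmt.Errorf("example failure"))
}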
|
text.py | # Copyright 2013 Tiago Barroso
# Copyright 2013 Frank Kmiec
# Copyright 2013-2016 Aleksej
# Copyright 2017 Christian Weiß
# Copyright 2018 Timothée Chauvin
# Copyright 2017-2018 Joseph Lorimer <[email protected]>
#
# Permission to use, copy, modify, and distribute this software for any purpose
# with or without fee is hereby granted, provided that the above copyright
# notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
from collections import defaultdict
from anki.notes import Note
from aqt import mw
from aqt.addcards import AddCards
from aqt.editcurrent import EditCurrent
from aqt.utils import getText, showInfo, showWarning, tooltip
from .util import fixImages, getField, setField
SCHEDULE_EXTRACT = 0
class TextManager:
def __init__(self):
self.history = defaultdict(list)
def highlight(self, bgColor=None, textColor=None):
if not bgColor:
bgColor = self.settings['highlightBgColor']
if not textColor:
textColor = self.settings['highlightTextColor']
script = "highlight('%s', '%s')" % (bgColor, textColor)
mw.web.eval(script)
self.save()
def format(self, style):
mw.web.eval('format("%s")' % style)
self.save()
def toggleOverlay(self):
mw.web.eval('toggleOverlay()')
self.save()
def extract(self, settings=None):
if not settings:
settings = self.settings
if not mw.web.selectedText() and not settings['editExtract']:
showInfo('Please select some text to extract.')
return
if settings['plainText']:
mw.web.evalWithCallback(
'getPlainText()',
lambda text: self.create(text, settings))
else:
mw.web.evalWithCallback(
'getHtmlText()',
lambda text: self.create(text, settings))
def create(self, text, settings):
currentCard = mw.reviewer.card
currentNote = currentCard.note()
model = mw.col.models.byName(settings['modelName'])
newNote = Note(mw.col, model)
newNote.tags = currentNote.tags
setField(newNote, settings['textField'], fixImages(text))
if settings['extractDeck']:
deck = mw.col.decks.byName(settings['extractDeck'])
if not deck:
showWarning('Destination deck no longer exists. '
'Please update your settings.')
return
did = deck['id']
else:
did = currentCard.did
if settings['isQuickKey']:
newNote.tags += settings['tags']
if settings['sourceField']:
setField(newNote,
settings['sourceField'],
getField(currentNote, self.settings['sourceField']))
if settings['editExtract']:
highlight = self._editExtract(newNote, did, settings)
else:
highlight = True
newNote.model()['did'] = did
mw.col.addNote(newNote)
else:
if settings['copyTitle']:
title = getField(currentNote, settings['titleField'])
else:
title = ''
setField(newNote,
settings['sourceField'],
getField(currentNote, settings['sourceField']))
if settings['prioEnabled']:
setField(newNote,
settings['priorityField'],
getField(currentNote, settings['priorityField']))
if settings['editExtract']:
setField(newNote, settings['titleField'], title)
highlight = self._editExtract(newNote, did, settings)
else:
highlight = self._getTitle(newNote, did, title, settings)
if settings['scheduleExtract'] and not settings['prioEnabled']:
cards = newNote.cards()
if cards:
mw.readingManager.scheduler.answer(
cards[0], SCHEDULE_EXTRACT)
if highlight:
self.highlight(settings['extractBgColor'],
settings['extractTextColor'])
if settings['editSource']:
EditCurrent(mw)
def _editExtract(self, note, did, settings):
def onAdd():
addCards.rejected.disconnect(self.undo)
addCards.reject()
addCards = AddCards(mw)
addCards.rejected.connect(self.undo)
addCards.addButton.clicked.connect(onAdd)
addCards.editor.setNote(note, focusTo=0)
deckName = mw.col.decks.get(did)['name']
addCards.deckChooser.setDeckName(deckName)
addCards.modelChooser.models.setText(settings['modelName'])
return True
def _getTitle(self, note, did, title, settings):
title, accepted = getText(
'Enter title', title='Extract Text', default=title)
if accepted:
setField(note, settings['titleField'], title)
note.model()['did'] = did
mw.col.addNote(note)
return accepted
def remove(self):
mw.web.eval('removeText()')
self.save()
def undo(self):
no | def save(self):
def callback(text):
if text:
note = mw.reviewer.card.note()
self.history[note.id].append(note['Text'])
note['Text'] = text
note.flush()
mw.web.evalWithCallback(
'document.getElementsByClassName("ir-text")[0].innerHTML;',
callback)
| te = mw.reviewer.card.note()
if note.id not in self.history or not self.history[note.id]:
showInfo('No undo history for this note.')
return
note['Text'] = self.history[note.id].pop()
note.flush()
mw.reset()
tooltip('Undone')
|
tx_pool.go | // Copyright 2014 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
package core
import (
"errors"
"fmt"
"math"
"math/big"
"sort"
"sync"
"time"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/common/prque"
"github.com/ethereum/go-ethereum/core/state"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/event"
"github.com/ethereum/go-ethereum/log"
"github.com/ethereum/go-ethereum/metrics"
"github.com/ethereum/go-ethereum/params"
)
const (
// chainHeadChanSize is the size of channel listening to ChainHeadEvent.
chainHeadChanSize = 10
)
var (
// ErrInvalidSender is returned if the transaction contains an invalid signature.
ErrInvalidSender = errors.New("invalid sender")
// ErrNonceTooLow is returned if the nonce of a transaction is lower than the
// one present in the local chain.
ErrNonceTooLow = errors.New("nonce too low")
// ErrUnderpriced is returned if a transaction's gas price is below the minimum
// configured for the transaction pool.
ErrUnderpriced = errors.New("transaction underpriced")
// ErrReplaceUnderpriced is returned if a transaction is attempted to be replaced
// with a different one without the required price bump.
ErrReplaceUnderpriced = errors.New("replacement transaction underpriced")
// ErrInsufficientFunds is returned if the total cost of executing a transaction
// is higher than the balance of the user's account.
ErrInsufficientFunds = errors.New("insufficient funds for gas * price + value")
// ErrIntrinsicGas is returned if the transaction is specified to use less gas
// than required to start the invocation.
ErrIntrinsicGas = errors.New("intrinsic gas too low")
// ErrGasLimit is returned if a transaction's requested gas limit exceeds the
// maximum allowance of the current block.
ErrGasLimit = errors.New("exceeds block gas limit")
	// ErrNegativeValue is a sanity error to ensure no one is able to specify a
// transaction with a negative value.
ErrNegativeValue = errors.New("negative value")
// ErrOversizedData is returned if the input data of a transaction is greater
// than some meaningful limit a user might use. This is not a consensus error
	// making the transaction invalid, but rather a DoS protection.
ErrOversizedData = errors.New("oversized data")
)
var (
evictionInterval = time.Minute // Time interval to check for evictable transactions
statsReportInterval = 8 * time.Second // Time interval to report transaction pool stats
)
var (
// Metrics for the pending pool
pendingDiscardMeter = metrics.NewRegisteredMeter("txpool/pending/discard", nil)
pendingReplaceMeter = metrics.NewRegisteredMeter("txpool/pending/replace", nil)
pendingRateLimitMeter = metrics.NewRegisteredMeter("txpool/pending/ratelimit", nil) // Dropped due to rate limiting
pendingNofundsMeter = metrics.NewRegisteredMeter("txpool/pending/nofunds", nil) // Dropped due to out-of-funds
// Metrics for the queued pool
queuedDiscardMeter = metrics.NewRegisteredMeter("txpool/queued/discard", nil)
queuedReplaceMeter = metrics.NewRegisteredMeter("txpool/queued/replace", nil)
queuedRateLimitMeter = metrics.NewRegisteredMeter("txpool/queued/ratelimit", nil) // Dropped due to rate limiting
queuedNofundsMeter = metrics.NewRegisteredMeter("txpool/queued/nofunds", nil) // Dropped due to out-of-funds
// General tx metrics
knownTxMeter = metrics.NewRegisteredMeter("txpool/known", nil)
validTxMeter = metrics.NewRegisteredMeter("txpool/valid", nil)
invalidTxMeter = metrics.NewRegisteredMeter("txpool/invalid", nil)
underpricedTxMeter = metrics.NewRegisteredMeter("txpool/underpriced", nil)
pendingGauge = metrics.NewRegisteredGauge("txpool/pending", nil)
queuedGauge = metrics.NewRegisteredGauge("txpool/queued", nil)
localGauge = metrics.NewRegisteredGauge("txpool/local", nil)
)
// TxStatus is the current status of a transaction as seen by the pool.
type TxStatus uint
const (
TxStatusUnknown TxStatus = iota
TxStatusQueued
TxStatusPending
TxStatusIncluded
)
// blockChain provides the state of blockchain and current gas limit to do
// some pre checks in tx pool and event subscribers.
type blockChain interface {
CurrentBlock() *types.Block
GetBlock(hash common.Hash, number uint64) *types.Block
StateAt(root common.Hash) (*state.StateDB, error)
SubscribeChainHeadEvent(ch chan<- ChainHeadEvent) event.Subscription
}
// TxPoolConfig are the configuration parameters of the transaction pool.
type TxPoolConfig struct {
Locals []common.Address // Addresses that should be treated by default as local
NoLocals bool // Whether local transaction handling should be disabled
Journal string // Journal of local transactions to survive node restarts
Rejournal time.Duration // Time interval to regenerate the local transaction journal
PriceLimit uint64 // Minimum gas price to enforce for acceptance into the pool
PriceBump uint64 // Minimum price bump percentage to replace an already existing transaction (nonce)
AccountSlots uint64 // Number of executable transaction slots guaranteed per account
GlobalSlots uint64 // Maximum number of executable transaction slots for all accounts
AccountQueue uint64 // Maximum number of non-executable transaction slots permitted per account
GlobalQueue uint64 // Maximum number of non-executable transaction slots for all accounts
Lifetime time.Duration // Maximum amount of time non-executable transaction are queued
}
// DefaultTxPoolConfig contains the default configurations for the transaction
// pool.
var DefaultTxPoolConfig = TxPoolConfig{
Journal: "transactions.rlp",
Rejournal: time.Hour,
PriceLimit: 1,
PriceBump: 10,
AccountSlots: 16,
GlobalSlots: 4096,
AccountQueue: 64,
GlobalQueue: 1024,
Lifetime: 3 * time.Hour,
}
// sanitize checks the provided user configurations and changes anything that's
// unreasonable or unworkable.
func (config *TxPoolConfig) sanitize() TxPoolConfig {
conf := *config
if conf.Rejournal < time.Second {
log.Warn("Sanitizing invalid txpool journal time", "provided", conf.Rejournal, "updated", time.Second)
conf.Rejournal = time.Second
}
if conf.PriceLimit < 1 {
log.Warn("Sanitizing invalid txpool price limit", "provided", conf.PriceLimit, "updated", DefaultTxPoolConfig.PriceLimit)
conf.PriceLimit = DefaultTxPoolConfig.PriceLimit
}
if conf.PriceBump < 1 {
log.Warn("Sanitizing invalid txpool price bump", "provided", conf.PriceBump, "updated", DefaultTxPoolConfig.PriceBump)
conf.PriceBump = DefaultTxPoolConfig.PriceBump
}
if conf.AccountSlots < 1 {
log.Warn("Sanitizing invalid txpool account slots", "provided", conf.AccountSlots, "updated", DefaultTxPoolConfig.AccountSlots)
conf.AccountSlots = DefaultTxPoolConfig.AccountSlots
}
if conf.GlobalSlots < 1 {
log.Warn("Sanitizing invalid txpool global slots", "provided", conf.GlobalSlots, "updated", DefaultTxPoolConfig.GlobalSlots)
conf.GlobalSlots = DefaultTxPoolConfig.GlobalSlots
}
if conf.AccountQueue < 1 {
log.Warn("Sanitizing invalid txpool account queue", "provided", conf.AccountQueue, "updated", DefaultTxPoolConfig.AccountQueue)
conf.AccountQueue = DefaultTxPoolConfig.AccountQueue
}
if conf.GlobalQueue < 1 {
log.Warn("Sanitizing invalid txpool global queue", "provided", conf.GlobalQueue, "updated", DefaultTxPoolConfig.GlobalQueue)
conf.GlobalQueue = DefaultTxPoolConfig.GlobalQueue
}
if conf.Lifetime < 1 {
log.Warn("Sanitizing invalid txpool lifetime", "provided", conf.Lifetime, "updated", DefaultTxPoolConfig.Lifetime)
conf.Lifetime = DefaultTxPoolConfig.Lifetime
}
return conf
}
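// exampleSanitize is an illustrative sketch, not part of the original file:
// sanitize repairs out-of-range values (with a warning) rather than failing,
// so callers always end up with a workable configuration.
func exampleSanitize() TxPoolConfig {
	cfg := DefaultTxPoolConfig
	cfg.PriceBump = 0     // invalid: the minimum accepted bump is 1
	return cfg.sanitize() // the returned copy has PriceBump reset to 10
}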
// TxPool contains all currently known transactions. Transactions
// enter the pool when they are received from the network or submitted
// locally. They exit the pool when they are included in the blockchain.
//
// The pool separates processable transactions (which can be applied to the
// current state) and future transactions. Transactions move between those
// two states over time as they are received and processed.
type TxPool struct {
config TxPoolConfig
chainconfig *params.ChainConfig
chain blockChain
gasPrice *big.Int
txFeed event.Feed
scope event.SubscriptionScope
signer types.Signer
mu sync.RWMutex
istanbul bool // Fork indicator whether we are in the istanbul stage.
currentState *state.StateDB // Current state in the blockchain head
pendingNonces *txNoncer // Pending state tracking virtual nonces
currentMaxGas uint64 // Current gas limit for transaction caps
locals *accountSet // Set of local transaction to exempt from eviction rules
journal *txJournal // Journal of local transaction to back up to disk
pending map[common.Address]*txList // All currently processable transactions
queue map[common.Address]*txList // Queued but non-processable transactions
beats map[common.Address]time.Time // Last heartbeat from each known account
all *txLookup // All transactions to allow lookups
priced *txPricedList // All transactions sorted by price
chainHeadCh chan ChainHeadEvent
chainHeadSub event.Subscription
reqResetCh chan *txpoolResetRequest
reqPromoteCh chan *accountSet
queueTxEventCh chan *types.Transaction
reorgDoneCh chan chan struct{}
reorgShutdownCh chan struct{} // requests shutdown of scheduleReorgLoop
wg sync.WaitGroup // tracks loop, scheduleReorgLoop
}
type txpoolResetRequest struct {
oldHead, newHead *types.Header
}
// NewTxPool creates a new transaction pool to gather, sort and filter inbound
// transactions from the network.
func NewTxPool(config TxPoolConfig, chainconfig *params.ChainConfig, chain blockChain) *TxPool {
// Sanitize the input to ensure no vulnerable gas prices are set
config = (&config).sanitize()
// Create the transaction pool with its initial settings
pool := &TxPool{
config: config,
chainconfig: chainconfig,
chain: chain,
signer: types.NewEIP155Signer(chainconfig.ChainID),
pending: make(map[common.Address]*txList),
queue: make(map[common.Address]*txList),
beats: make(map[common.Address]time.Time),
all: newTxLookup(),
chainHeadCh: make(chan ChainHeadEvent, chainHeadChanSize),
reqResetCh: make(chan *txpoolResetRequest),
reqPromoteCh: make(chan *accountSet),
queueTxEventCh: make(chan *types.Transaction),
reorgDoneCh: make(chan chan struct{}),
reorgShutdownCh: make(chan struct{}),
gasPrice: new(big.Int).SetUint64(config.PriceLimit),
}
pool.locals = newAccountSet(pool.signer)
for _, addr := range config.Locals {
log.Info("Setting new local account", "address", addr)
pool.locals.add(addr)
}
pool.priced = newTxPricedList(pool.all)
pool.reset(nil, chain.CurrentBlock().Header())
// Start the reorg loop early so it can handle requests generated during journal loading.
pool.wg.Add(1)
go pool.scheduleReorgLoop()
// If local transactions and journaling is enabled, load from disk
if !config.NoLocals && config.Journal != "" {
pool.journal = newTxJournal(config.Journal)
if err := pool.journal.load(pool.AddLocals); err != nil {
log.Warn("Failed to load transaction journal", "err", err)
}
if err := pool.journal.rotate(pool.local()); err != nil {
log.Warn("Failed to rotate transaction journal", "err", err)
}
}
// Subscribe events from blockchain and start the main event loop.
pool.chainHeadSub = pool.chain.SubscribeChainHeadEvent(pool.chainHeadCh)
pool.wg.Add(1)
go pool.loop()
return pool
}
// loop is the transaction pool's main event loop, waiting for and reacting to
// outside blockchain events as well as for various reporting and transaction
// eviction events.
func (pool *TxPool) loop() {
defer pool.wg.Done()
var (
prevPending, prevQueued, prevStales int
// Start the stats reporting and transaction eviction tickers
report = time.NewTicker(statsReportInterval)
evict = time.NewTicker(evictionInterval)
journal = time.NewTicker(pool.config.Rejournal)
// Track the previous head headers for transaction reorgs
head = pool.chain.CurrentBlock()
)
defer report.Stop()
defer evict.Stop()
defer journal.Stop()
for {
select {
// Handle ChainHeadEvent
case ev := <-pool.chainHeadCh:
if ev.Block != nil {
pool.requestReset(head.Header(), ev.Block.Header())
head = ev.Block
}
// System shutdown.
case <-pool.chainHeadSub.Err():
close(pool.reorgShutdownCh)
return
// Handle stats reporting ticks
case <-report.C:
pool.mu.RLock()
pending, queued := pool.stats()
stales := pool.priced.stales
pool.mu.RUnlock()
if pending != prevPending || queued != prevQueued || stales != prevStales {
log.Debug("Transaction pool status report", "executable", pending, "queued", queued, "stales", stales)
prevPending, prevQueued, prevStales = pending, queued, stales
}
// Handle inactive account transaction eviction
case <-evict.C:
pool.mu.Lock()
for addr := range pool.queue {
// Skip local transactions from the eviction mechanism
if pool.locals.contains(addr) {
continue
}
// Any non-locals old enough should be removed
if time.Since(pool.beats[addr]) > pool.config.Lifetime {
for _, tx := range pool.queue[addr].Flatten() {
pool.removeTx(tx.Hash(), true)
}
}
}
pool.mu.Unlock()
// Handle local transaction journal rotation
case <-journal.C:
if pool.journal != nil {
pool.mu.Lock()
if err := pool.journal.rotate(pool.local()); err != nil {
log.Warn("Failed to rotate local tx journal", "err", err)
}
pool.mu.Unlock()
}
}
}
}
// Stop terminates the transaction pool.
func (pool *TxPool) Stop() {
// Unsubscribe all subscriptions registered from txpool
pool.scope.Close()
// Unsubscribe subscriptions registered from blockchain
pool.chainHeadSub.Unsubscribe()
pool.wg.Wait()
if pool.journal != nil {
pool.journal.close()
}
log.Info("Transaction pool stopped")
}
// SubscribeNewTxsEvent registers a subscription of NewTxsEvent and
// starts sending events to the given channel.
func (pool *TxPool) SubscribeNewTxsEvent(ch chan<- NewTxsEvent) event.Subscription {
return pool.scope.Track(pool.txFeed.Subscribe(ch))
}
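// For example, a consumer might subscribe like this (sketch; the buffer
// size is an arbitrary choice):
//
//	ch := make(chan NewTxsEvent, 16)
//	sub := pool.SubscribeNewTxsEvent(ch)
//	defer sub.Unsubscribe()
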
// GasPrice returns the current gas price enforced by the transaction pool.
func (pool *TxPool) GasPrice() *big.Int {
pool.mu.RLock()
defer pool.mu.RUnlock()
return new(big.Int).Set(pool.gasPrice)
}
// SetGasPrice updates the minimum price required by the transaction pool for a
// new transaction, and drops all transactions below this threshold.
func (pool *TxPool) SetGasPrice(price *big.Int) {
pool.mu.Lock()
defer pool.mu.Unlock()
pool.gasPrice = price
for _, tx := range pool.priced.Cap(price, pool.locals) {
pool.removeTx(tx.Hash(), false)
}
log.Info("Transaction pool price threshold updated", "price", price)
}
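// For instance (sketch; the threshold value is illustrative):
//
//	pool.SetGasPrice(big.NewInt(2 * params.GWei))
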
// Nonce returns the next nonce of an account, with all transactions executable
// by the pool already applied on top.
func (pool *TxPool) Nonce(addr common.Address) uint64 {
pool.mu.RLock()
defer pool.mu.RUnlock()
return pool.pendingNonces.get(addr)
}
// Stats retrieves the current pool stats, namely the number of pending and the
// number of queued (non-executable) transactions.
func (pool *TxPool) Stats() (int, int) {
pool.mu.RLock()
defer pool.mu.RUnlock()
return pool.stats()
}
// stats retrieves the current pool stats, namely the number of pending and the
// number of queued (non-executable) transactions.
func (pool *TxPool) stats() (int, int) {
pending := 0
for _, list := range pool.pending {
pending += list.Len()
}
queued := 0
for _, list := range pool.queue {
queued += list.Len()
}
return pending, queued
}
// Content retrieves the data content of the transaction pool, returning all the
// pending as well as queued transactions, grouped by account and sorted by nonce.
func (pool *TxPool) Content() (map[common.Address]types.Transactions, map[common.Address]types.Transactions) {
pool.mu.Lock()
defer pool.mu.Unlock()
pending := make(map[common.Address]types.Transactions)
for addr, list := range pool.pending {
pending[addr] = list.Flatten()
}
queued := make(map[common.Address]types.Transactions)
for addr, list := range pool.queue {
queued[addr] = list.Flatten()
}
return pending, queued
}
// Pending retrieves all currently processable transactions, grouped by origin
// account and sorted by nonce. The returned transaction set is a copy and can be
// freely modified by calling code.
func (pool *TxPool) Pending() (map[common.Address]types.Transactions, error) {
pool.mu.Lock()
defer pool.mu.Unlock()
pending := make(map[common.Address]types.Transactions)
for addr, list := range pool.pending {
pending[addr] = list.Flatten()
}
return pending, nil
}
// Locals retrieves the accounts currently considered local by the pool.
func (pool *TxPool) Locals() []common.Address {
pool.mu.Lock()
defer pool.mu.Unlock()
return pool.locals.flatten()
}
// local retrieves all currently known local transactions, grouped by origin
// account and sorted by nonce. The returned transaction set is a copy and can be
// freely modified by calling code.
func (pool *TxPool) local() map[common.Address]types.Transactions {
txs := make(map[common.Address]types.Transactions)
for addr := range pool.locals.accounts {
if pending := pool.pending[addr]; pending != nil {
txs[addr] = append(txs[addr], pending.Flatten()...)
}
if queued := pool.queue[addr]; queued != nil {
txs[addr] = append(txs[addr], queued.Flatten()...)
}
}
return txs
}
// validateTx checks whether a transaction is valid according to the consensus
// rules and adheres to some heuristic limits of the local node (price and size).
func (pool *TxPool) validateTx(tx *types.Transaction, local bool) error {
// Heuristic limit, reject transactions over 32KB to prevent DOS attacks
if tx.Size() > 32*1024 {
return ErrOversizedData
}
// Transactions can't be negative. This may never happen using RLP decoded
// transactions but may occur if you create a transaction using the RPC.
if tx.Value().Sign() < 0 {
return ErrNegativeValue
}
// Ensure the transaction doesn't exceed the current block limit gas.
if pool.currentMaxGas < tx.Gas() {
return ErrGasLimit
}
// Make sure the transaction is signed properly
from, err := types.Sender(pool.signer, tx)
if err != nil {
return ErrInvalidSender
}
// Drop non-local transactions under our own minimal accepted gas price
local = local || pool.locals.contains(from) // account may be local even if the transaction arrived from the network
if !local && pool.gasPrice.Cmp(tx.GasPrice()) > 0 {
return ErrUnderpriced
}
// Ensure the transaction adheres to nonce ordering
if pool.currentState.GetNonce(from) > tx.Nonce() {
return ErrNonceTooLow
}
// Transactor should have enough funds to cover the costs
// cost == V + GP * GL
if pool.currentState.GetBalance(from).Cmp(tx.Cost()) < 0 {
return ErrInsufficientFunds
}
// Ensure the transaction has more gas than the basic tx fee.
intrGas, err := IntrinsicGas(tx.Data(), tx.To() == nil, true, pool.istanbul)
if err != nil {
return err
}
if tx.Gas() < intrGas {
return ErrIntrinsicGas
}
return nil
}
// add validates a transaction and inserts it into the non-executable queue for later
// pending promotion and execution. If the transaction is a replacement for an already
// pending or queued one, it overwrites the previous transaction if its price is higher.
//
// If a newly added transaction is marked as local, its sending account will be
// whitelisted, preventing any associated transaction from being dropped out of the pool
// due to pricing constraints.
func (pool *TxPool) add(tx *types.Transaction, local bool) (replaced bool, err error) {
// If the transaction is already known, discard it
hash := tx.Hash()
if pool.all.Get(hash) != nil {
log.Trace("Discarding already known transaction", "hash", hash)
knownTxMeter.Mark(1)
return false, fmt.Errorf("known transaction: %x", hash)
}
// If the transaction fails basic validation, discard it
if err := pool.validateTx(tx, local); err != nil {
log.Trace("Discarding invalid transaction", "hash", hash, "err", err)
invalidTxMeter.Mark(1)
return false, err
}
// If the transaction pool is full, discard underpriced transactions
if uint64(pool.all.Count()) >= pool.config.GlobalSlots+pool.config.GlobalQueue {
// If the new transaction is underpriced, don't accept it
if !local && pool.priced.Underpriced(tx, pool.locals) {
log.Trace("Discarding underpriced transaction", "hash", hash, "price", tx.GasPrice())
underpricedTxMeter.Mark(1)
return false, ErrUnderpriced
}
		// New transaction is better than our worst ones, make room for it
drop := pool.priced.Discard(pool.all.Count()-int(pool.config.GlobalSlots+pool.config.GlobalQueue-1), pool.locals)
for _, tx := range drop {
log.Trace("Discarding freshly underpriced transaction", "hash", tx.Hash(), "price", tx.GasPrice())
underpricedTxMeter.Mark(1)
pool.removeTx(tx.Hash(), false)
}
}
// Try to replace an existing transaction in the pending pool
from, _ := types.Sender(pool.signer, tx) // already validated
if list := pool.pending[from]; list != nil && list.Overlaps(tx) {
// Nonce already pending, check if required price bump is met
inserted, old := list.Add(tx, pool.config.PriceBump)
if !inserted {
pendingDiscardMeter.Mark(1)
return false, ErrReplaceUnderpriced
}
// New transaction is better, replace old one
if old != nil {
pool.all.Remove(old.Hash())
pool.priced.Removed(1)
pendingReplaceMeter.Mark(1)
}
pool.all.Add(tx)
pool.priced.Put(tx)
pool.journalTx(from, tx)
pool.queueTxEvent(tx)
log.Trace("Pooled new executable transaction", "hash", hash, "from", from, "to", tx.To())
return old != nil, nil
}
// New transaction isn't replacing a pending one, push into queue
replaced, err = pool.enqueueTx(hash, tx)
if err != nil {
return false, err
}
// Mark local addresses and journal local transactions
if local {
if !pool.locals.contains(from) {
log.Info("Setting new local account", "address", from)
pool.locals.add(from)
}
}
if local || pool.locals.contains(from) {
localGauge.Inc(1)
}
pool.journalTx(from, tx)
log.Trace("Pooled new future transaction", "hash", hash, "from", from, "to", tx.To())
return replaced, nil
}
// enqueueTx inserts a new transaction into the non-executable transaction queue.
//
// Note, this method assumes the pool lock is held!
func (pool *TxPool) enqueueTx(hash common.Hash, tx *types.Transaction) (bool, error) {
// Try to insert the transaction into the future queue
from, _ := types.Sender(pool.signer, tx) // already validated
if pool.queue[from] == nil {
pool.queue[from] = newTxList(false)
}
inserted, old := pool.queue[from].Add(tx, pool.config.PriceBump)
if !inserted {
// An older transaction was better, discard this
queuedDiscardMeter.Mark(1)
return false, ErrReplaceUnderpriced
}
// Discard any previous transaction and mark this
if old != nil {
pool.all.Remove(old.Hash())
pool.priced.Removed(1)
queuedReplaceMeter.Mark(1)
} else {
// Nothing was replaced, bump the queued counter
queuedGauge.Inc(1)
}
if pool.all.Get(hash) == nil {
pool.all.Add(tx)
pool.priced.Put(tx)
}
return old != nil, nil
}
// journalTx adds the specified transaction to the local disk journal if it is
// deemed to have been sent from a local account.
func (pool *TxPool) journalTx(from common.Address, tx *types.Transaction) {
// Only journal if it's enabled and the transaction is local
if pool.journal == nil || !pool.locals.contains(from) {
return
}
if err := pool.journal.insert(tx); err != nil {
log.Warn("Failed to journal local transaction", "err", err)
}
}
// promoteTx adds a transaction to the pending (processable) list of transactions
// and returns whether it was inserted or an older one was better.
//
// Note, this method assumes the pool lock is held!
func (pool *TxPool) promoteTx(addr common.Address, hash common.Hash, tx *types.Transaction) bool {
// Try to insert the transaction into the pending queue
if pool.pending[addr] == nil {
pool.pending[addr] = newTxList(true)
}
list := pool.pending[addr]
inserted, old := list.Add(tx, pool.config.PriceBump)
if !inserted {
// An older transaction was better, discard this
pool.all.Remove(hash)
pool.priced.Removed(1)
pendingDiscardMeter.Mark(1)
return false
}
// Otherwise discard any previous transaction and mark this
if old != nil {
pool.all.Remove(old.Hash())
pool.priced.Removed(1)
pendingReplaceMeter.Mark(1)
} else {
// Nothing was replaced, bump the pending counter
pendingGauge.Inc(1)
}
// Failsafe to work around direct pending inserts (tests)
if pool.all.Get(hash) == nil {
pool.all.Add(tx)
pool.priced.Put(tx)
}
// Set the potentially new pending nonce and notify any subsystems of the new tx
pool.beats[addr] = time.Now()
pool.pendingNonces.set(addr, tx.Nonce()+1)
return true
}
// AddLocals enqueues a batch of transactions into the pool if they are valid, marking the
// senders as local ones, ensuring they go around the local pricing constraints.
//
// This method is used to add transactions from the RPC API and performs synchronous pool
// reorganization and event propagation.
func (pool *TxPool) AddLocals(txs []*types.Transaction) []error {
return pool.addTxs(txs, !pool.config.NoLocals, true)
}
// AddLocal enqueues a single local transaction into the pool if it is valid. This is
// a convenience wrapper around AddLocals.
func (pool *TxPool) AddLocal(tx *types.Transaction) error {
errs := pool.AddLocals([]*types.Transaction{tx})
return errs[0]
}
// AddRemotes enqueues a batch of transactions into the pool if they are valid. If the
// senders are not among the locally tracked ones, full pricing constraints will apply.
//
// This method is used to add transactions from the p2p network and does not wait for pool
// reorganization and internal event propagation.
func (pool *TxPool) AddRemotes(txs []*types.Transaction) []error {
return pool.addTxs(txs, false, false)
}
// This is like AddRemotes, but waits for pool reorganization. Tests use this method.
func (pool *TxPool) AddRemotesSync(txs []*types.Transaction) []error {
return pool.addTxs(txs, false, true)
}
// This is like AddRemotes with a single transaction, but waits for pool reorganization. Tests use this method.
func (pool *TxPool) addRemoteSync(tx *types.Transaction) error {
errs := pool.AddRemotesSync([]*types.Transaction{tx})
return errs[0]
}
// AddRemote enqueues a single transaction into the pool if it is valid. This is a convenience
// wrapper around AddRemotes.
//
// Deprecated: use AddRemotes
func (pool *TxPool) AddRemote(tx *types.Transaction) error {
errs := pool.AddRemotes([]*types.Transaction{tx})
return errs[0]
}
// addTxs attempts to queue a batch of transactions if they are valid.
func (pool *TxPool) addTxs(txs []*types.Transaction, local, sync bool) []error {
// Filter out known ones without obtaining the pool lock or recovering signatures
var (
errs = make([]error, len(txs))
news = make([]*types.Transaction, 0, len(txs))
)
for i, tx := range txs {
// If the transaction is known, pre-set the error slot
if pool.all.Get(tx.Hash()) != nil {
errs[i] = fmt.Errorf("known transaction: %x", tx.Hash())
knownTxMeter.Mark(1)
continue
}
// Accumulate all unknown transactions for deeper processing
news = append(news, tx)
}
if len(news) == 0 {
return errs
}
// Cache senders in transactions before obtaining lock (pool.signer is immutable)
for _, tx := range news {
types.Sender(pool.signer, tx)
}
	// Process all the new transactions and merge any errors into the original slice
pool.mu.Lock()
newErrs, dirtyAddrs := pool.addTxsLocked(news, local)
pool.mu.Unlock()
var nilSlot = 0
for _, err := range newErrs {
for errs[nilSlot] != nil {
nilSlot++
}
errs[nilSlot] = err
}
// Reorg the pool internals if needed and return
done := pool.requestPromoteExecutables(dirtyAddrs)
if sync {
<-done
}
return errs
}
// addTxsLocked attempts to queue a batch of transactions if they are valid.
// The transaction pool lock must be held.
func (pool *TxPool) addTxsLocked(txs []*types.Transaction, local bool) ([]error, *accountSet) {
dirty := newAccountSet(pool.signer)
errs := make([]error, len(txs))
for i, tx := range txs {
replaced, err := pool.add(tx, local)
errs[i] = err
if err == nil && !replaced {
dirty.addTx(tx)
}
}
validTxMeter.Mark(int64(len(dirty.accounts)))
return errs, dirty
}
// Status returns the status (unknown/pending/queued) of a batch of transactions
// identified by their hashes.
func (pool *TxPool) Status(hashes []common.Hash) []TxStatus {
status := make([]TxStatus, len(hashes))
for i, hash := range hashes {
tx := pool.Get(hash)
if tx == nil {
continue
}
from, _ := types.Sender(pool.signer, tx) // already validated
pool.mu.RLock()
if txList := pool.pending[from]; txList != nil && txList.txs.items[tx.Nonce()] != nil {
status[i] = TxStatusPending
} else if txList := pool.queue[from]; txList != nil && txList.txs.items[tx.Nonce()] != nil {
status[i] = TxStatusQueued
}
		// implicit else: the tx may have been included in a block between
// checking pool.Get and obtaining the lock. In that case, TxStatusUnknown is correct
pool.mu.RUnlock()
}
return status
}
// Get returns a transaction if it is contained in the pool and nil otherwise.
func (pool *TxPool) Get(hash common.Hash) *types.Transaction {
return pool.all.Get(hash)
}
// removeTx removes a single transaction from the queue, moving all subsequent
// transactions back to the future queue.
func (pool *TxPool) removeTx(hash common.Hash, outofbound bool) {
// Fetch the transaction we wish to delete
tx := pool.all.Get(hash)
if tx == nil {
return
}
addr, _ := types.Sender(pool.signer, tx) // already validated during insertion
// Remove it from the list of known transactions
pool.all.Remove(hash)
if outofbound {
pool.priced.Removed(1)
}
if pool.locals.contains(addr) {
localGauge.Dec(1)
}
// Remove the transaction from the pending lists and reset the account nonce
if pending := pool.pending[addr]; pending != nil {
if removed, invalids := pending.Remove(tx); removed {
// If no more pending transactions are left, remove the list
if pending.Empty() {
delete(pool.pending, addr)
delete(pool.beats, addr)
}
// Postpone any invalidated transactions
for _, tx := range invalids {
pool.enqueueTx(tx.Hash(), tx)
}
// Update the account nonce if needed
pool.pendingNonces.setIfLower(addr, tx.Nonce())
// Reduce the pending counter
pendingGauge.Dec(int64(1 + len(invalids)))
return
}
}
// Transaction is in the future queue
if future := pool.queue[addr]; future != nil {
if removed, _ := future.Remove(tx); removed {
// Reduce the queued counter
queuedGauge.Dec(1)
}
if future.Empty() {
delete(pool.queue, addr)
}
}
}
// requestReset requests a pool reset to the new head block.
// The returned channel is closed when the reset has occurred.
func (pool *TxPool) requestReset(oldHead *types.Header, newHead *types.Header) chan struct{} {
select {
case pool.reqResetCh <- &txpoolResetRequest{oldHead, newHead}:
return <-pool.reorgDoneCh
case <-pool.reorgShutdownCh:
return pool.reorgShutdownCh
}
}
// requestPromoteExecutables requests transaction promotion checks for the given addresses.
// The returned channel is closed when the promotion checks have occurred.
func (pool *TxPool) requestPromoteExecutables(set *accountSet) chan struct{} {
select {
case pool.reqPromoteCh <- set:
return <-pool.reorgDoneCh
case <-pool.reorgShutdownCh:
return pool.reorgShutdownCh
}
}
// queueTxEvent enqueues a transaction event to be sent in the next reorg run.
func (pool *TxPool) queueTxEvent(tx *types.Transaction) {
select {
case pool.queueTxEventCh <- tx:
case <-pool.reorgShutdownCh:
}
}
// scheduleReorgLoop schedules runs of reset and promoteExecutables. Code above should not
// call those methods directly, but request them being run using requestReset and
// requestPromoteExecutables instead.
func (pool *TxPool) scheduleReorgLoop() {
defer pool.wg.Done()
var (
curDone chan struct{} // non-nil while runReorg is active
nextDone = make(chan struct{})
launchNextRun bool
reset *txpoolResetRequest
dirtyAccounts *accountSet
queuedEvents = make(map[common.Address]*txSortedMap)
)
for {
// Launch next background reorg if needed
if curDone == nil && launchNextRun {
// Run the background reorg and announcements
go pool.runReorg(nextDone, reset, dirtyAccounts, queuedEvents)
// Prepare everything for the next round of reorg
curDone, nextDone = nextDone, make(chan struct{})
launchNextRun = false
reset, dirtyAccounts = nil, nil
queuedEvents = make(map[common.Address]*txSortedMap)
}
select {
case req := <-pool.reqResetCh:
// Reset request: update head if request is already pending.
if reset == nil {
reset = req
} else {
reset.newHead = req.newHead
}
launchNextRun = true
pool.reorgDoneCh <- nextDone
case req := <-pool.reqPromoteCh:
// Promote request: update address set if request is already pending.
if dirtyAccounts == nil {
dirtyAccounts = req
} else {
dirtyAccounts.merge(req)
}
launchNextRun = true
pool.reorgDoneCh <- nextDone
case tx := <-pool.queueTxEventCh:
// Queue up the event, but don't schedule a reorg. It's up to the caller to
// request one later if they want the events sent.
addr, _ := types.Sender(pool.signer, tx)
if _, ok := queuedEvents[addr]; !ok {
queuedEvents[addr] = newTxSortedMap()
}
queuedEvents[addr].Put(tx)
case <-curDone:
curDone = nil
case <-pool.reorgShutdownCh:
// Wait for current run to finish.
if curDone != nil {
<-curDone
}
close(nextDone)
return
}
}
}
// runReorg runs reset and promoteExecutables on behalf of scheduleReorgLoop.
func (pool *TxPool) runReorg(done chan struct{}, reset *txpoolResetRequest, dirtyAccounts *accountSet, events map[common.Address]*txSortedMap) {
defer close(done)
var promoteAddrs []common.Address
if dirtyAccounts != nil {
promoteAddrs = dirtyAccounts.flatten()
}
pool.mu.Lock()
if reset != nil {
// Reset from the old head to the new, rescheduling any reorged transactions
pool.reset(reset.oldHead, reset.newHead)
// Nonces were reset, discard any events that became stale
for addr := range events {
events[addr].Forward(pool.pendingNonces.get(addr))
if events[addr].Len() == 0 {
delete(events, addr)
}
}
// Reset needs promote for all addresses
promoteAddrs = promoteAddrs[:0]
for addr := range pool.queue {
promoteAddrs = append(promoteAddrs, addr)
}
}
// Check for pending transactions for every account that sent new ones
promoted := pool.promoteExecutables(promoteAddrs)
for _, tx := range promoted {
addr, _ := types.Sender(pool.signer, tx)
if _, ok := events[addr]; !ok {
events[addr] = newTxSortedMap()
}
events[addr].Put(tx)
}
// If a new block appeared, validate the pool of pending transactions. This will
// remove any transaction that has been included in the block or was invalidated
// because of another transaction (e.g. higher gas price).
if reset != nil {
pool.demoteUnexecutables()
}
// Ensure pool.queue and pool.pending sizes stay within the configured limits.
pool.truncatePending()
pool.truncateQueue()
// Update all accounts to the latest known pending nonce
for addr, list := range pool.pending {
txs := list.Flatten() // Heavy but will be cached and is needed by the miner anyway
pool.pendingNonces.set(addr, txs[len(txs)-1].Nonce()+1)
}
pool.mu.Unlock()
// Notify subsystems for newly added transactions
if len(events) > 0 {
var txs []*types.Transaction
for _, set := range events {
txs = append(txs, set.Flatten()...)
}
pool.txFeed.Send(NewTxsEvent{txs})
}
}
// reset retrieves the current state of the blockchain and ensures the content
// of the transaction pool is valid with regard to the chain state.
func (pool *TxPool) reset(oldHead, newHead *types.Header) {
// If we're reorging an old state, reinject all dropped transactions
var reinject types.Transactions
if oldHead != nil && oldHead.Hash() != newHead.ParentHash {
// If the reorg is too deep, avoid doing it (will happen during fast sync)
oldNum := oldHead.Number.Uint64()
newNum := newHead.Number.Uint64()
if depth := uint64(math.Abs(float64(oldNum) - float64(newNum))); depth > 64 {
log.Debug("Skipping deep transaction reorg", "depth", depth)
} else {
// Reorg seems shallow enough to pull in all transactions into memory
var discarded, included types.Transactions
var (
rem = pool.chain.GetBlock(oldHead.Hash(), oldHead.Number.Uint64())
add = pool.chain.GetBlock(newHead.Hash(), newHead.Number.Uint64())
)
if rem == nil {
// This can happen if a setHead is performed, where we simply discard the old
// head from the chain.
// If that is the case, we don't have the lost transactions any more, and
// there's nothing to add
if newNum < oldNum {
// If the reorg ended up on a lower number, it's indicative of setHead being the cause
log.Debug("Skipping transaction reset caused by setHead",
"old", oldHead.Hash(), "oldnum", oldNum, "new", newHead.Hash(), "newnum", newNum)
} else {
// If we reorged to a same or higher number, then it's not a case of setHead
log.Warn("Transaction pool reset with missing oldhead",
"old", oldHead.Hash(), "oldnum", oldNum, "new", newHead.Hash(), "newnum", newNum)
}
return
}
for rem.NumberU64() > add.NumberU64() {
discarded = append(discarded, rem.Transactions()...)
if rem = pool.chain.GetBlock(rem.ParentHash(), rem.NumberU64()-1); rem == nil {
log.Error("Unrooted old chain seen by tx pool", "block", oldHead.Number, "hash", oldHead.Hash())
return
}
}
for add.NumberU64() > rem.NumberU64() {
included = append(included, add.Transactions()...)
if add = pool.chain.GetBlock(add.ParentHash(), add.NumberU64()-1); add == nil {
log.Error("Unrooted new chain seen by tx pool", "block", newHead.Number, "hash", newHead.Hash())
return
}
}
for rem.Hash() != add.Hash() {
discarded = append(discarded, rem.Transactions()...)
if rem = pool.chain.GetBlock(rem.ParentHash(), rem.NumberU64()-1); rem == nil {
log.Error("Unrooted old chain seen by tx pool", "block", oldHead.Number, "hash", oldHead.Hash())
return
}
included = append(included, add.Transactions()...)
if add = pool.chain.GetBlock(add.ParentHash(), add.NumberU64()-1); add == nil {
log.Error("Unrooted new chain seen by tx pool", "block", newHead.Number, "hash", newHead.Hash())
return
}
}
reinject = types.TxDifference(discarded, included)
}
}
// Initialize the internal state to the current head
if newHead == nil {
newHead = pool.chain.CurrentBlock().Header() // Special case during testing
}
statedb, err := pool.chain.StateAt(newHead.Root)
if err != nil {
log.Error("Failed to reset txpool state", "err", err)
return
}
pool.currentState = statedb
pool.pendingNonces = newTxNoncer(statedb)
pool.currentMaxGas = newHead.GasLimit
// Inject any transactions discarded due to reorgs
log.Debug("Reinjecting stale transactions", "count", len(reinject))
senderCacher.recover(pool.signer, reinject)
pool.addTxsLocked(reinject, false)
// Update all fork indicator by next pending block number.
next := new(big.Int).Add(newHead.Number, big.NewInt(1))
pool.istanbul = pool.chainconfig.IsIstanbul(next)
}
// promoteExecutables moves transactions that have become processable from the
// future queue to the set of pending transactions. During this process, all
// invalidated transactions (low nonce, low balance) are deleted.
func (pool *TxPool) promoteExecutables(accounts []common.Address) []*types.Transaction {
// Track the promoted transactions to broadcast them at once
var promoted []*types.Transaction
// Iterate over all accounts and promote any executable transactions
for _, addr := range accounts {
list := pool.queue[addr]
if list == nil {
continue // Just in case someone calls with a non existing account
}
// Drop all transactions that are deemed too old (low nonce)
forwards := list.Forward(pool.currentState.GetNonce(addr))
for _, tx := range forwards {
hash := tx.Hash()
pool.all.Remove(hash)
log.Trace("Removed old queued transaction", "hash", hash)
}
// Drop all transactions that are too costly (low balance or out of gas)
drops, _ := list.Filter(pool.currentState.GetBalance(addr), pool.currentMaxGas)
for _, tx := range drops {
hash := tx.Hash()
pool.all.Remove(hash)
log.Trace("Removed unpayable queued transaction", "hash", hash)
}
queuedNofundsMeter.Mark(int64(len(drops)))
// Gather all executable transactions and promote them
readies := list.Ready(pool.pendingNonces.get(addr))
for _, tx := range readies {
hash := tx.Hash()
if pool.promoteTx(addr, hash, tx) {
log.Trace("Promoting queued transaction", "hash", hash)
promoted = append(promoted, tx)
}
}
queuedGauge.Dec(int64(len(readies)))
// Drop all transactions over the allowed limit
var caps types.Transactions
if !pool.locals.contains(addr) {
caps = list.Cap(int(pool.config.AccountQueue))
for _, tx := range caps {
hash := tx.Hash()
pool.all.Remove(hash)
log.Trace("Removed cap-exceeding queued transaction", "hash", hash)
}
queuedRateLimitMeter.Mark(int64(len(caps)))
}
// Mark all the items dropped as removed
pool.priced.Removed(len(forwards) + len(drops) + len(caps))
queuedGauge.Dec(int64(len(forwards) + len(drops) + len(caps)))
if pool.locals.contains(addr) {
localGauge.Dec(int64(len(forwards) + len(drops) + len(caps)))
}
// Delete the entire queue entry if it became empty.
if list.Empty() {
delete(pool.queue, addr)
}
}
return promoted
}
// truncatePending removes transactions from the pending queue if the pool is above the
// pending limit. The algorithm tries to reduce transaction counts by an approximately
// equal number for all accounts with many pending transactions.
func (pool *TxPool) truncatePending() {
pending := uint64(0)
for _, list := range pool.pending {
pending += uint64(list.Len())
}
if pending <= pool.config.GlobalSlots {
return
}
pendingBeforeCap := pending
// Assemble a spam order to penalize large transactors first
spammers := prque.New(nil)
for addr, list := range pool.pending {
// Only evict transactions from high rollers
if !pool.locals.contains(addr) && uint64(list.Len()) > pool.config.AccountSlots {
spammers.Push(addr, int64(list.Len()))
}
}
// Gradually drop transactions from offenders
offenders := []common.Address{}
for pending > pool.config.GlobalSlots && !spammers.Empty() {
// Retrieve the next offender if not local address
offender, _ := spammers.Pop()
offenders = append(offenders, offender.(common.Address))
// Equalize balances until all the same or below threshold
if len(offenders) > 1 {
// Calculate the equalization threshold for all current offenders
threshold := pool.pending[offender.(common.Address)].Len()
// Iteratively reduce all offenders until below limit or threshold reached
for pending > pool.config.GlobalSlots && pool.pending[offenders[len(offenders)-2]].Len() > threshold {
for i := 0; i < len(offenders)-1; i++ {
list := pool.pending[offenders[i]]
caps := list.Cap(list.Len() - 1)
for _, tx := range caps {
// Drop the transaction from the global pools too
hash := tx.Hash()
pool.all.Remove(hash)
// Update the account nonce to the dropped transaction
pool.pendingNonces.setIfLower(offenders[i], tx.Nonce())
log.Trace("Removed fairness-exceeding pending transaction", "hash", hash)
}
pool.priced.Removed(len(caps))
pendingGauge.Dec(int64(len(caps)))
if pool.locals.contains(offenders[i]) {
localGauge.Dec(int64(len(caps)))
}
pending--
}
}
}
}
// If still above threshold, reduce to limit or min allowance
if pending > pool.config.GlobalSlots && len(offenders) > 0 {
for pending > pool.config.GlobalSlots && uint64(pool.pending[offenders[len(offenders)-1]].Len()) > pool.config.AccountSlots {
for _, addr := range offenders {
list := pool.pending[addr]
caps := list.Cap(list.Len() - 1)
for _, tx := range caps {
// Drop the transaction from the global pools too
hash := tx.Hash()
pool.all.Remove(hash)
// Update the account nonce to the dropped transaction
pool.pendingNonces.setIfLower(addr, tx.Nonce())
log.Trace("Removed fairness-exceeding pending transaction", "hash", hash)
}
pool.priced.Removed(len(caps))
pendingGauge.Dec(int64(len(caps)))
if pool.locals.contains(addr) {
localGauge.Dec(int64(len(caps)))
}
pending--
}
}
}
pendingRateLimitMeter.Mark(int64(pendingBeforeCap - pending))
}
// truncateQueue drops the oldest transactions in the queue if the pool is above the global queue limit.
func (pool *TxPool) truncateQueue() {
queued := uint64(0)
for _, list := range pool.queue {
queued += uint64(list.Len())
}
if queued <= pool.config.GlobalQueue {
return
}
// Sort all accounts with queued transactions by heartbeat
addresses := make(addressesByHeartbeat, 0, len(pool.queue))
for addr := range pool.queue {
if !pool.locals.contains(addr) { // don't drop locals
addresses = append(addresses, addressByHeartbeat{addr, pool.beats[addr]})
}
}
sort.Sort(addresses)
// Drop transactions until the total is below the limit or only locals remain
for drop := queued - pool.config.GlobalQueue; drop > 0 && len(addresses) > 0; {
addr := addresses[len(addresses)-1]
list := pool.queue[addr.address]
addresses = addresses[:len(addresses)-1]
// Drop all transactions if they are less than the overflow
if size := uint64(list.Len()); size <= drop {
for _, tx := range list.Flatten() {
pool.removeTx(tx.Hash(), true)
}
drop -= size
queuedRateLimitMeter.Mark(int64(size))
continue
}
// Otherwise drop only last few transactions
txs := list.Flatten()
for i := len(txs) - 1; i >= 0 && drop > 0; i-- {
pool.removeTx(txs[i].Hash(), true)
drop--
queuedRateLimitMeter.Mark(1)
}
}
}
// demoteUnexecutables removes invalid and processed transactions from the pools
// executable/pending queue and any subsequent transactions that become unexecutable
// are moved back into the future queue.
func (pool *TxPool) demoteUnexecutables() {
// Iterate over all accounts and demote any non-executable transactions
for addr, list := range pool.pending {
nonce := pool.currentState.GetNonce(addr)
// Drop all transactions that are deemed too old (low nonce)
olds := list.Forward(nonce)
for _, tx := range olds {
hash := tx.Hash()
pool.all.Remove(hash)
log.Trace("Removed old pending transaction", "hash", hash)
}
// Drop all transactions that are too costly (low balance or out of gas), and queue any invalids back for later
drops, invalids := list.Filter(pool.currentState.GetBalance(addr), pool.currentMaxGas)
for _, tx := range drops {
hash := tx.Hash()
log.Trace("Removed unpayable pending transaction", "hash", hash)
pool.all.Remove(hash)
}
pool.priced.Removed(len(olds) + len(drops))
pendingNofundsMeter.Mark(int64(len(drops)))
for _, tx := range invalids {
hash := tx.Hash()
log.Trace("Demoting pending transaction", "hash", hash)
pool.enqueueTx(hash, tx)
}
pendingGauge.Dec(int64(len(olds) + len(drops) + len(invalids)))
if pool.locals.contains(addr) {
localGauge.Dec(int64(len(olds) + len(drops) + len(invalids)))
}
// If there's a gap in front, alert (should never happen) and postpone all transactions
if list.Len() > 0 && list.txs.Get(nonce) == nil {
gapped := list.Cap(0)
for _, tx := range gapped {
hash := tx.Hash()
log.Error("Demoting invalidated transaction", "hash", hash)
pool.enqueueTx(hash, tx)
}
pendingGauge.Dec(int64(len(gapped)))
}
// Delete the entire queue entry if it became empty.
if list.Empty() {
delete(pool.pending, addr)
delete(pool.beats, addr)
}
}
}
// addressByHeartbeat is an account address tagged with its last activity timestamp.
type addressByHeartbeat struct {
address common.Address
heartbeat time.Time
}
type addressesByHeartbeat []addressByHeartbeat
func (a addressesByHeartbeat) Len() int { return len(a) }
func (a addressesByHeartbeat) Less(i, j int) bool { return a[i].heartbeat.Before(a[j].heartbeat) }
func (a addressesByHeartbeat) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
// accountSet is simply a set of addresses to check for existence, and a signer
// capable of deriving addresses from transactions.
type accountSet struct {
accounts map[common.Address]struct{}
signer types.Signer
cache *[]common.Address
}
// newAccountSet creates a new address set with an associated signer for sender
// derivations.
func newAccountSet(signer types.Signer, addrs ...common.Address) *accountSet {
as := &accountSet{
accounts: make(map[common.Address]struct{}),
signer: signer,
}
for _, addr := range addrs {
as.add(addr)
}
return as
}
// contains checks if a given address is contained within the set.
func (as *accountSet) contains(addr common.Address) bool {
_, exist := as.accounts[addr]
return exist
}
// containsTx checks if the sender of a given tx is within the set. If the sender
// cannot be derived, this method returns false.
func (as *accountSet) containsTx(tx *types.Transaction) bool {
if addr, err := types.Sender(as.signer, tx); err == nil {
return as.contains(addr)
}
return false
}
// add inserts a new address into the set to track.
func (as *accountSet) add(addr common.Address) {
as.accounts[addr] = struct{}{}
as.cache = nil
}
// addTx adds the sender of tx into the set.
func (as *accountSet) addTx(tx *types.Transaction) {
if addr, err := types.Sender(as.signer, tx); err == nil {
as.add(addr)
}
}
// flatten returns the list of addresses within this set, also caching it for later
// reuse. The returned slice should not be changed!
func (as *accountSet) flatten() []common.Address {
if as.cache == nil {
accounts := make([]common.Address, 0, len(as.accounts))
for account := range as.accounts {
accounts = append(accounts, account)
}
as.cache = &accounts
}
return *as.cache
}
// merge adds all addresses from the 'other' set into 'as'.
func (as *accountSet) merge(other *accountSet) {
for addr := range other.accounts {
as.accounts[addr] = struct{}{}
}
as.cache = nil
}
// txLookup is used internally by TxPool to track transactions while allowing lookup without
// mutex contention.
//
// Note, although this type is properly protected against concurrent access, it
// is **not** a type that should ever be mutated or even exposed outside of the
// transaction pool, since its internal state is tightly coupled with the pools
// internal mechanisms. The sole purpose of the type is to permit out-of-bound
// peeking into the pool in TxPool.Get without having to acquire the widely scoped
// TxPool.mu mutex.
type txLookup struct {
all map[common.Hash]*types.Transaction
lock sync.RWMutex
}
// newTxLookup returns a new txLookup structure.
func newTxLookup() *txLookup {
	return &txLookup{
		all: make(map[common.Hash]*types.Transaction),
	}
}
// Range calls f on each key and value present in the map.
func (t *txLookup) Range(f func(hash common.Hash, tx *types.Transaction) bool) {
t.lock.RLock()
defer t.lock.RUnlock()
for key, value := range t.all {
if !f(key, value) {
break
}
}
}
// Get returns a transaction if it exists in the lookup, or nil if not found.
func (t *txLookup) Get(hash common.Hash) *types.Transaction {
t.lock.RLock()
defer t.lock.RUnlock()
return t.all[hash]
}
// Count returns the current number of items in the lookup.
func (t *txLookup) Count() int {
t.lock.RLock()
defer t.lock.RUnlock()
return len(t.all)
}
// Add adds a transaction to the lookup.
func (t *txLookup) Add(tx *types.Transaction) {
t.lock.Lock()
defer t.lock.Unlock()
t.all[tx.Hash()] = tx
}
// Remove removes a transaction from the lookup.
func (t *txLookup) Remove(hash common.Hash) {
t.lock.Lock()
defer t.lock.Unlock()
delete(t.all, hash)
}
header-modal.tsx | import cn from 'classnames'
import { Icon } from '@ui/icon'
import { DecisionTreeModalDataCy } from '@components/data-testid/decision-tree-modal.cy'
import { SwitchTheme } from '../../../query-builder/ui/switch-theme'
interface IHeaderModalProps {
groupName?: string
handleClose: () => void
theme?: string
isTextEditor?: boolean
handleChangeTheme?: any
}
export const HeaderModal = ({
groupName,
handleClose,
theme = 'light',
isTextEditor,
handleChangeTheme,
}: IHeaderModalProps) => (
<div className="flex w-full justify-between items-center font-medium flex-grow-0 flex-shrink-0">
<div
className={cn(theme === 'light' ? 'text-black' : 'text-white', 'flex')}
data-testid={DecisionTreeModalDataCy.modalHeader}
>
{groupName}
{isTextEditor && (
<SwitchTheme handleChangeTheme={handleChangeTheme} theme={theme} />
      )}
    </div>
<Icon
name="Close"
size={16}
className={cn(
'cursor-pointer',
theme === 'light' ? 'text-black' : 'text-white',
)}
onClick={handleClose}
/>
</div>
)
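// Example usage (props below are illustrative, not taken from the application):
//   <HeaderModal groupName="Variants" handleClose={() => setOpen(false)} />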
icdar2015.py | dataset_type = 'IcdarDataset'
data_root = 'data/icdar2015'
train = dict(
type=dataset_type,
ann_file=f'{data_root}/instances_training.json',
img_prefix=f'{data_root}/imgs',
pipeline=None)
test = dict(
type=dataset_type,
    ann_file=f'{data_root}/instances_test.json',
    img_prefix=f'{data_root}/imgs',
    pipeline=None)

train_list = [train]
test_list = [test]
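# Note (assumption about usage, not shown in this file): in MMOCR-style dataset
# configs such as this one, train_list/test_list are typically consumed by a
# downstream model config, which also supplies the pipelines left as None above.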
load_sqldump.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
'''
Download an SQL dump of a MySQL database via phpMyAdmin
'''
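# Example invocation (URL, credentials and target directory are illustrative):
#   ./load_sqldump.py -n pma_user -w pma_pass -d ./dumps \
#       http://www.example.com/phpmyadmin/ database_name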
import re
import os
import sys
import base64
import urllib
import urllib2
import traceback
from cookielib import CookieJar, DefaultCookiePolicy
from pprint import pprint
__author__ = 'furyu ([email protected])'
__version__ = '0.0.1e'
__copyright__ = 'Copyright (c) 2014 furyu'
__license__ = 'New BSD License'
def prn(message, linefeed = True): #{
if isinstance(message, unicode):
message = message.encode('utf-8', 'replace')
if linefeed:
print message
else:
print message,
#} // end of def prn()
def prn_error(message, linefeed = True): #{
if isinstance(message, unicode):
message = message.encode('utf-8', 'replace')
if linefeed:
print >> sys.stderr, message
else:
print >> sys.stderr, message,
#} // end of def prn_error()
class LoadSqlDump(object): #{
#{ class variables
DEFAULT_HEADER_DICT = {
'Accept-Charset': 'Shift_JIS,utf-8;q=0.7,*;q=0.7',
'Accept-Language': 'ja,en-us;q=0.7,en;q=0.3',
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36',
}
DEFAULT_PMA_LOGIN_PARAM_DICT = dict(
server = u'1',
target = u'index.php',
)
DEFAULT_PARAM_DICT = dict(
server = u'1',
export_type = u'server',
export_method = u'quick',
quick_or_custom = u'custom',
output_format = u'sendit',
filename_template = u'@SERVER@',
remember_template = u'on',
charset_of_file = u'utf-8',
compression = u'none', # none, zip, gzip
what = u'sql',
codegen_structure_or_data = u'data',
codegen_format = u'0',
csv_separator = u'',
csv_enclosed = u'"',
csv_escaped = u'"',
csv_terminated = u'AUTO',
csv_null = u'NULL',
csv_structure_or_data = u'data',
excel_null = u'NULL',
excel_edition = u'win',
excel_structure_or_data = u'data',
htmlword_structure_or_data = u'structure_and_data',
htmlword_null = u'NULL',
json_structure_or_data = u'data',
latex_caption = u'something',
latex_structure_or_data = u'structure_and_data',
latex_structure_caption = u'テーブル @TABLE@ の構造',
latex_structure_continued_caption = u'テーブル @TABLE@ の構造 (続き)',
latex_structure_label = u'tab:@TABLE@-structure',
latex_comments = u'something',
latex_columns = u'something',
latex_data_caption = u'テーブル @TABLE@ の内容',
latex_data_continued_caption = u'テーブル @TABLE@ の内容 (続き)',
latex_data_label = u'tab:@TABLE@-data',
        latex_null = ur'\textit{NULL}', # raw string so that \t is not interpreted as a tab
mediawiki_structure_or_data = u'data',
ods_null = u'NULL',
ods_structure_or_data = u'data',
odt_structure_or_data = u'structure_and_data',
odt_comments = u'something',
odt_columns = u'something',
odt_null = u'NULL',
pdf_report_title = u'',
pdf_structure_or_data = u'data',
php_array_structure_or_data = u'data',
sql_include_comments = u'something',
sql_header_comment = u'',
sql_compatibility = u'NONE',
sql_structure_or_data = u'structure_and_data',
        sql_drop_table = u'something', # check the "Add DROP TABLE / VIEW / PROCEDURE / FUNCTION statement" option
sql_procedure_function = u'something',
sql_create_table_statements = u'something',
sql_if_not_exists = u'something',
sql_auto_increment = u'something',
sql_backquotes = u'something',
sql_type = u'INSERT',
sql_insert_syntax = u'both',
sql_max_query_size = u'50000',
sql_hex_for_blob = u'something',
sql_utc_time = u'something',
texytext_structure_or_data = u'structure_and_data',
texytext_null = u'NULL',
yaml_structure_or_data = u'data',
knjenc = u'',
)
RE_TOKEN = re.compile(u"token\s*=[^\w]*(\w+)[^\w]")
KB = 1024
MB = 1024*1024
BUFSIZE = 256*1024
CODEC_LIST = ('utf_8', 'euc_jp', 'cp932',) # see http://docs.python.org/2.7/library/codecs.html#standard-encodings
RE_ESC_SEQ = re.compile('\x1b($B|$@|\(B|\(J| A)')
PMA_CODEC = 'utf-8'
#} // end of class variables
def __init__(self, url_phpmyadmin_top, user=None, passwd=None, pma_username=None, pma_password=None, tgt_dir=None, server_number=1, quiet=False, param_dict=None): #{
'''
url_phpmyadmin_top: URL of phpMyAdmin's toppage
user : user name for Basic Authentication
passwd : password for Basic Authentication
pma_username : user name for phpMyAdmin
pma_password : password for phpMyAdmin
tgt_dir : directory to save
server_number : MySQL server number
quiet : (True) quiet mode
param_dict : additional parameter's dictionary to export
'''
(src_codec, url_phpmyadmin_top) = self._str_decode(url_phpmyadmin_top)
url_phpmyadmin_top = re.sub(u'/index\.php(\?.*)?$', ur'', url_phpmyadmin_top)
if not re.search(u'/$', url_phpmyadmin_top): url_phpmyadmin_top += '/'
self.url_phpmyadmin_top = url_phpmyadmin_top
self.pma_username = pma_username
self.pma_password = pma_password
self.quiet = quiet
self.last_url = ''
try:
self.server_number = unicode(int(server_number))
except:
self.server_number = u'1'
self.header_dict = self.DEFAULT_HEADER_DICT.copy()
if user and passwd:
self.header_dict['Authorization'] = 'Basic %s' % (base64.b64encode('%s:%s' % (user, passwd)))
self.login_param_dict = self.DEFAULT_PMA_LOGIN_PARAM_DICT.copy()
self.login_param_dict['server'] = self.server_number
self.param_dict = self.DEFAULT_PARAM_DICT.copy()
if isinstance(param_dict, dict): self.param_dict.update(param_dict)
self.param_dict['server'] = self.server_number
self.url_opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(CookieJar(policy=DefaultCookiePolicy(rfc2965=True, netscape=True))))
self.filename_codec = sys.getfilesystemencoding()
if tgt_dir:
(src_codec, tgt_dir) = self._str_decode(tgt_dir)
else:
tgt_dir = '.'
tgt_dir_enc = tgt_dir.encode(self.filename_codec, 'ignore')
if not os.path.isdir(tgt_dir_enc):
try:
os.makedirs(tgt_dir_enc)
except:
prn_error( traceback.format_exc() )
prn_error( u'Error: cannot create "%s"' % (tgt_dir) )
tgt_dir = '.'
tgt_dir_enc = tgt_dir.encode(self.filename_codec, 'ignore')
self.tgt_dir = tgt_dir
self.tgt_dir_enc = tgt_dir_enc
if not self.quiet:
prn( u'phpMyAdmin: %s' % (self.url_phpmyadmin_top) )
prn( u'directory : %s' % (tgt_dir) )
#} // end of def __init__()
def do(self, db_name): #{
"""
download "<db_name>.sql" via phpMyAdmin
"""
(src_codec, db_name) = self._str_decode(db_name)
filename = u'%s.sql' % (db_name)
if not self.quiet:
prn( u'%s' % (filename) )
flg_success = False
while True:
token = self._get_token()
if not token: break
url = self.url_phpmyadmin_top + 'export.php'
self.param_dict.update(
token = token,
db_select = [db_name],
)
rsp = self._fetch(url, data=self._make_data(self.param_dict))
if rsp.code < 200 or 300 <= rsp.code:
prn_error( u'Error: %s => %d: %s' % (url, rsp.code, rsp.msg) )
break
filename_enc = os.path.join(self.tgt_dir_enc, filename.encode(self.filename_codec, 'ignore'))
fp = open(filename_enc, 'wb')
#info = rsp.info()
#for key in info.keys():
# prn( '%s=%s' % (key, info.getheader(key)) )
#fp.write(rsp.read())
size = 0
for buf in iter(lambda:rsp.read(self.BUFSIZE),''):
fp.write(buf)
size += len(buf)
if not self.quiet:
if size < self.MB:
prn( '\r%6d KB' % (size//self.KB), False )
else:
prn( '\r%6.2f MB' % (float(size)/self.MB), False )
sys.stdout.flush()
fp.close()
if not self.quiet: prn( '' )
flg_success = True
break
return flg_success
    #} // end of def do()
def _get_token(self): #{
def _get_token_from_rsp(rsp): #{
(token, content) = (None, None)
while True:
if rsp.code < 200 or 300 <= rsp.code:
prn_error( u'Error: %s => %d: %s' % (url, rsp.code, rsp.msg) )
break
(src_codec, content) = self._str_decode(rsp.read())
mrslt = self.RE_TOKEN.search(content)
if not mrslt:
prn_error( u'Error: token not found' )
break
token = mrslt.group(1)
break
return (token, content)
#} // end of def _get_token_from_rsp()
url = self.url_phpmyadmin_top
rsp = self._fetch(url)
(token, content) = _get_token_from_rsp(rsp)
while token:
if not re.search(u'name="pma_username"', content): break
(pma_username, pma_password) = (self.pma_username, self.pma_password)
if not pma_username or not pma_password:
prn_error( u'Error: both pma_username and pma_password required' )
token = None
break
param_dict = self.login_param_dict
param_dict.update(dict(
pma_username = pma_username,
pma_password = pma_password,
token = token,
))
rsp = self._fetch(url, data=self._make_data(param_dict))
(token, content) = _get_token_from_rsp(rsp)
if re.search(u'name="pma_username"', content):
prn_error( u'Error: incorrect pma_username or pma_password' )
token = None
break
break
self.token = token
return token
#} // end of def _get_token()
def _quote(self, param, charset='utf-8'): #{
return urllib.quote(param.encode(charset, 'ignore'), safe='~')
#} // end of def _quote()
def _make_data(self, param_dict): #{
query_list=[]
quote = lambda s: self._quote(s, self.PMA_CODEC)
for key in sorted(param_dict.keys()):
if isinstance(param_dict[key], list):
for param in param_dict[key]:
query_list.append('%s[]=%s' % (quote(key), quote(param)))
else:
query_list.append('%s=%s' % (quote(key), quote(param_dict[key])))
return '&'.join(query_list)
#} // end of def _make_data()
def _fetch(self, url, data=None, headers=None): #{
mrslt = re.search(u'^(https?://)([^/]+)(.*)$', url)
proto = mrslt.group(1).encode(self.PMA_CODEC,'ignore')
domain = mrslt.group(2).encode('idna')
path = mrslt.group(3).encode(self.PMA_CODEC, 'ignore')
url = proto + domain + path
if not headers:
headers = self.header_dict
headers['Referer'] = self.last_url
rsp = self.url_opener.open(urllib2.Request(url, data=data, headers=headers))
self.last_url = rsp.geturl()
return rsp
#} // end of def _fetch()
def _str_decode(self, src_str): #{
(src_codec, dec_str) = (None, src_str)
while True:
if not isinstance(src_str, basestring): break
if isinstance(src_str, unicode):
src_codec = 'unicode_internal'
break
try:
dec_str = src_str.decode('iso2022_jp')
src_codec = self.RE_ESC_SEQ.search(src_str) and 'iso2022_jp' or 'ascii'
break
except UnicodeDecodeError, s:
pass
for test_codec in self.CODEC_LIST:
try:
dec_str = src_str.decode(test_codec)
src_codec = test_codec
break
except UnicodeDecodeError, s:
pass
break
return (src_codec, dec_str)
#} // end of def _str_decode()
#} // end of class LoadSqlDump()
if __name__ == '__main__': #{
import optparse
usage = u"./%prog [options] <phpMyAdmin's URL> <database name> [<database name> ...]"
optparser = optparse.OptionParser(usage=usage, version=__version__)
optparser.add_option(
'-u', '--ba-user',
action = 'store',
metavar = '<BASIC-AUTH USER>',
help = u"user name for Basic Authentication",
dest = 'user'
)
optparser.add_option(
'-p', '--ba-passwd',
action = 'store',
metavar = '<BASIC-AUTH PASSWORD>',
help = u"password for Basic Authentication",
dest = 'passwd'
)
optparser.add_option(
'-n', '--pma-user',
action = 'store',
metavar = '<PMA-USER>',
help = u"user password for phpMyAdmin",
dest = 'pma_username'
)
optparser.add_option(
'-w', '--pma-passwd',
action = 'store',
metavar = '<PMA-PASSWORD>',
help = u"user name for phpMyAdmin",
dest = 'pma_password'
)
optparser.add_option(
'-s', '--server-number',
type= 'int',
#default = 1,
metavar = '<SERVER NUMBER>',
        help = u'MySQL server number (default: 1)',
dest = 'server_number',
)
optparser.add_option(
'-d', '--directory',
action = 'store',
metavar = '<DIRECTORY>',
help = u"directory to save",
dest = 'tgt_dir'
)
optparser.add_option(
'-q','--quiet'
, action = 'store_true'
, help = u"quiet mode"
, dest = 'quiet'
)
optparser.add_option(
'-f', '--option-list-file',
action = 'store',
metavar = '<OPTION LIST FILE>',
help = u"option list file",
dest = 'option_file'
)
(options, args) = optparser.parse_args()
    # --- defaults
(user, passwd) = (None, None)
(pma_username, pma_password) = (None, None)
server_number = 1
tgt_dir = None
quiet = False
if options.option_file:
fp = open(options.option_file, 'rb')
_argv = []
for line in fp:
line = line.strip()
mrslt = re.search('^(-\w)\s+(.*)$', line)
if mrslt:
_argv.append(mrslt.group(1))
_argv.append(mrslt.group(2))
else:
_argv.append(line)
fp.close()
(_options, _args) = optparser.parse_args(_argv)
        # --- values from the option list file
if _options.user is not None: user = _options.user
if _options.passwd is not None: passwd = _options.passwd
if _options.pma_username is not None: pma_username = _options.pma_username
if _options.pma_password is not None: pma_password = _options.pma_password
if _options.server_number is not None: server_number = _options.server_number
if _options.tgt_dir is not None: tgt_dir = _options.tgt_dir
if _options.quiet is not None: quiet = _options.quiet
    # --- values specified on the command line
if options.user is not None: user = options.user
if options.passwd is not None: passwd = options.passwd
if options.pma_username is not None: pma_username = options.pma_username
if options.pma_password is not None: pma_password = options.pma_password
if options.server_number is not None: server_number = options.server_number
if options.tgt_dir is not None: tgt_dir = options.tgt_dir
if options.quiet is not None: quiet = options.quiet
if 1 < len(args):
exit_code = 0
url_phpmyadmin_top = args[0]
load_sqldump = LoadSqlDump(url_phpmyadmin_top, user=user, passwd=passwd, pma_username=pma_username, pma_password=pma_password, tgt_dir=tgt_dir, server_number=server_number, quiet=quiet)
for db_name in args[1:]:
if not load_sqldump.do(db_name):
exit_code += 1
exit(exit_code)
else:
optparser.print_help()
exit(255)
#} // end of __main__
# ■ end of file
loader.go | package ui
import (
"fmt"
"time"
"github.com/maxence-charriere/go-app/v9/pkg/app"
)
const (
defaultLoaderErrorIcon = `<svg style="width:%vpx;height:%vpx" viewBox="0 0 24 24">
<path fill="currentColor" d="M22 14H21C21 10.13 17.87 7 14 7H13V5.73C13.6 5.39 14 4.74 14 4C14 2.9 13.11 2 12 2S10 2.9 10 4C10 4.74 10.4 5.39 11 5.73V7H10C6.13 7 3 10.13 3 14H2C1.45 14 1 14.45 1 15V18C1 18.55 1.45 19 2 19H3V20C3 21.11 3.9 22 5 22H19C20.11 22 21 21.11 21 20V19H22C22.55 19 23 18.55 23 18V15C23 14.45 22.55 14 22 14M9.86 16.68L8.68 17.86L7.5 16.68L6.32 17.86L5.14 16.68L6.32 15.5L5.14 14.32L6.32 13.14L7.5 14.32L8.68 13.14L9.86 14.32L8.68 15.5L9.86 16.68M18.86 16.68L17.68 17.86L16.5 16.68L15.32 17.86L14.14 16.68L15.32 15.5L14.14 14.32L15.32 13.14L16.5 14.32L17.68 13.14L18.86 14.32L17.68 15.5L18.86 16.68Z" />
</svg>`
)
type ILoader interface {
app.UI
// Sets the ID.
ID(v string) ILoader
// Sets the class. Multiple classes can be defined by successive calls.
Class(v string) ILoader
// Sets the style. Multiple styles can be defined by successive calls.
Style(k, v string) ILoader
	// Sets whether the loader is active.
Loading(v bool) ILoader
// Sets the size of the rotating circle in px. Default is 60px.
Size(px int) ILoader
// Sets the color of the rotating head. Default is white.
Color(v string) ILoader
	// Sets the time it takes to fully rotate. Default is 500ms.
Speed(v time.Duration) ILoader
// Sets the space between the loader and the label in px. Default is 18px.
Spacing(px int) ILoader
// Sets the label. Default is "Loading...".
Label(v string) ILoader
	// Sets the error that occurred during loading.
Err(err error) ILoader
// Sets the error icon.
ErrIcon(v string) ILoader
}
func Loader() ILoader {
return &loader{
Isize: 60,
Icolor: "white",
Ispeed: time.Millisecond * 500,
Ispacing: 18,
Ilabel: "Loading...",
IerrIcon: defaultLoaderErrorIcon,
}
}
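// Example usage from a consuming package (values are illustrative):
//
//	ui.Loader().
//		Size(48).
//		Label("Fetching items...").
//		Loading(true)
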
type loader struct {
app.Compo
Iid string
Iclass string
Istyles []style
Iloading bool
Isize int
Icolor string
Ispacing int
Ilabel string
Ispeed time.Duration
Ierr error
IerrIcon string
}
func (l *loader) ID(v string) ILoader {
l.Iid = v
return l
}
func (l *loader) Class(v string) ILoader {
l.Iclass = app.AppendClass(l.Iclass, v)
return l
}
func (l *loader) Style(k, v string) ILoader {
if v == "" {
return l
}
l.Istyles = append(l.Istyles, style{
key: k,
value: v,
})
return l
}
func (l *loader) Loading(v bool) ILoader {
l.Iloading = v
return l
}
func (l *loader) Size(px int) ILoader {
l.Isize = px
return l
}
func (l *loader) Color(v string) ILoader {
l.Icolor = v
return l
}
func (l *loader) Speed(v time.Duration) ILoader {
l.Ispeed = v
return l
}
func (l *loader) Spacing(px int) ILoader {
l.Ispacing = px
return l
}
func (l *loader) Label(v string) ILoader {
l.Ilabel = v
return l
}
func (l *loader) Err(err error) ILoader {
l.Ierr = err
return l
}
func (l *loader) ErrIcon(v string) ILoader {
l.IerrIcon = v
return l
}
func (l *loader) Render() app.UI {
body := app.Aside().
ID(l.Iid).
Class(l.Iclass).
Body(
Stack().
Style("width", "100%").
Style("height", "100%").
Center().
Middle().
Content(
app.If(l.Ierr == nil,
app.Div().
Style("width", pxToString(l.Isize-4)).
Style("height", pxToString(l.Isize-4)).
Style("border", "2px solid currentColor").
Style("border-top", "2px solid "+l.Icolor).
Style("border-radius", "50%").
Style("animation", fmt.Sprintf("goapp-spin-frames %vms linear infinite", l.Ispeed.Milliseconds())),
).Else(
Icon().
Size(l.Isize).
Src(l.IerrIcon),
),
app.Div().
Style("margin-left", pxToString(l.Ispacing)).
Body(
app.If(l.Ierr == nil,
app.Div().Text(l.Ilabel),
).Else(
app.Div().
Style("white-space", "pre-wrap").
Text(l.Ierr),
),
),
),
)
for _, s := range l.Istyles {
body.Style(s.key, s.value)
}
	if l.Ierr == nil && !l.Iloading {
		body.Style("display", "none")
	}
return body
}
sys_base_menu.go | package service
import (
"errors"
"little-gin-admin/global"
"little-gin-admin/model"
)
// @title DeleteBaseMenu
// @description delete a base menu entry
// @auth (2020/04/05 20:22)
// @param id float64
// @return err error
func DeleteBaseMenu(id float64) (err error) {
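// Probe for child menus first: an error from the lookup (record not found)
// means this menu has no sub-menus, so it is safe to delete.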
err = global.DB.Where("parent_id = ?", id).First(&model.SysBaseMenu{}).Error
if err != nil {
var menu model.SysBaseMenu
db := global.DB.Preload("SysAuthoritys").Where("id = ?", id).First(&menu).Delete(&menu)
if len(menu.SysAuthoritys) > 0 {
err = db.Association("SysAuthoritys").Delete(menu.SysAuthoritys).Error
} else {
err = db.Error
}
} else {
return errors.New("此菜单存在子菜单不可删除")
}
return err
}
// @title UpdateBaseMenu
// @description update a menu entry
// @auth (2020/04/05 20:22)
// @param menu model.SysBaseMenu
// @return err error
func UpdateBaseMenu(menu model.SysBaseMenu) (err error) {
var oldMenu model.SysBaseMenu
upDateMap := make(map[string]interface{})
upDateMap["keep_alive"] = menu.KeepAlive
upDateMap["default_menu"] = menu.DefaultMenu
upDateMap["parent_id"] = menu.ParentId
upDateMap["path"] = menu.Path
upDateMap["name"] = menu.Name
upDateMap["hidden"] = menu.Hidden
upDateMap["component"] = menu.Component
upDateMap["title"] = menu.Title
upDateMap["icon"] = menu.Icon
upDateMap["sort"] = menu.Sort
db := global.DB.Where("id = ?", menu.ID).Find(&oldMenu)
if oldMenu.Name != menu.Name {
notSame := global.DB.Where("id <> ? AND name = ?", menu.ID, menu.Name).First(&model.SysBaseMenu{}).RecordNotFound()
if !notSame {
global.Logger.Debug("存在相同name修改失败")
return errors.New("存在相同name修改失败")
}
}
err = db.Updates(upDateMap).Error
global.Logger.Debug("菜单修改时候,关联菜单err:%v", err)
return err
}
// @title GetBaseMenuById
// @description get current menu; returns the currently selected menu
// @auth (2020/04/05 20:22)
// @param id float64
// @return err error
func GetBaseMenuById(id float64) (err error, menu model.SysBaseMenu) {
err = global.DB.Where("id = ?", id).First(&menu).Error
return
}
index.js | import { gql, useQuery } from "@apollo/client";
const dappLearningCollectibles = gql`
{
dappLearningCollectibles(first: 108) {
id
tokenId
owner
isBurn
isAuction
}
}
`;
const getCurrentColl = gql`
query getCurrent($address:Bytes){
dappLearningCollectibles(first: 108,where: { owner: $address }) {
id
tokenId
owner
isBurn
}
}
`
export { dappLearningCollectibles, getCurrentColl };
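// Illustrative usage sketch with the imported useQuery hook (the component
// context and the `account` variable are assumptions):
// const { loading, data } = useQuery(getCurrentColl, { variables: { address: account } });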
fast_ica.rs | //! Fast algorithm for Independent Component Analysis (ICA)
use linfa::{
dataset::{DatasetBase, Records, WithLapack, WithoutLapack},
traits::*,
Float,
};
use ndarray::{array, concatenate, s, Array, Array1, Array2, ArrayBase, Axis, Data, Ix2};
use ndarray_linalg::{eigh::Eigh, solveh::UPLO, svd::SVD};
use ndarray_rand::{rand::SeedableRng, rand_distr::Uniform, RandomExt};
use ndarray_stats::QuantileExt;
use rand_isaac::Isaac64Rng;
#[cfg(feature = "serde")]
use serde_crate::{Deserialize, Serialize};
use crate::error::{FastIcaError, Result};
use crate::hyperparams::FastIcaValidParams;
impl<F: Float, D: Data<Elem = F>, T> Fit<ArrayBase<D, Ix2>, T, FastIcaError>
for FastIcaValidParams<F>
{
type Object = FastIca<F>;
/// Fit the model
///
/// # Errors
///
/// If the [`FastIca::ncomponents`] is set to a number greater than the minimum of
/// the number of rows and columns
///
/// If the `alpha` value set for [`GFunc::Logcosh`] is not between 1 and 2
/// inclusive
fn fit(&self, dataset: &DatasetBase<ArrayBase<D, Ix2>, T>) -> Result<Self::Object> {
let x = &dataset.records;
let (nsamples, nfeatures) = (x.nsamples(), x.nfeatures());
if dataset.nsamples() == 0 {
return Err(FastIcaError::NotEnoughSamples);
}
// If the number of components is not set, we take the minimum of
// the number of rows and columns
let ncomponents = self
.ncomponents()
.unwrap_or_else(|| nsamples.min(nfeatures));
// The number of components cannot be greater than the minimum of
// the number of rows and columns
if ncomponents > nsamples.min(nfeatures) {
return Err(FastIcaError::InvalidValue(format!(
"ncomponents cannot be greater than the min({}, {}), got {}",
nsamples, nfeatures, ncomponents
)));
}
// We center the input by subtracting the mean of its features
// safe unwrap because we already returned an error on zero samples
let xmean = x.mean_axis(Axis(0)).unwrap();
let mut xcentered = x - &xmean.view().insert_axis(Axis(0));
// We transpose the centered matrix
xcentered = xcentered.reversed_axes();
// We whiten the matrix to remove any potential correlation between
// the components
let xcentered = xcentered.with_lapack();
let k = match xcentered.svd(true, false)? {
(Some(u), s, _) => {
let s = s.mapv(F::Lapack::cast);
(u.slice_move(s![.., ..nsamples.min(nfeatures)]) / s)
.t()
.slice(s![..ncomponents, ..])
.to_owned()
}
_ => return Err(FastIcaError::SvdDecomposition),
};
let mut xwhitened = k.dot(&xcentered).without_lapack();
let k = k.without_lapack();
// We multiply the matrix with root of the number of records
let nsamples_sqrt = F::cast(nsamples).sqrt();
xwhitened.mapv_inplace(|x| x * nsamples_sqrt);
// We initialize the de-mixing matrix with a uniform distribution
let w: Array2<f64>;
if let Some(seed) = self.random_state() {
let mut rng = Isaac64Rng::seed_from_u64(*seed as u64);
w = Array::random_using((ncomponents, ncomponents), Uniform::new(0., 1.), &mut rng);
} else {
w = Array::random((ncomponents, ncomponents), Uniform::new(0., 1.));
}
let mut w = w.mapv(F::cast);
// We find the optimized de-mixing matrix
w = self.ica_parallel(&xwhitened, &w)?;
// We whiten the de-mixing matrix
let components = w.dot(&k);
Ok(FastIca {
mean: xmean,
components,
})
}
}
impl<F: Float> FastIcaValidParams<F> {
// Parallel FastICA, Optimization step
fn ica_parallel(&self, x: &Array2<F>, w: &Array2<F>) -> Result<Array2<F>> {
let mut w = Self::sym_decorrelation(w)?;
let p = x.ncols() as f64;
for _ in 0..self.max_iter() {
let (gwtx, g_wtx) = self.gfunc().exec(&w.dot(x))?;
let lhs = gwtx.dot(&x.t()).mapv(|x| x / F::cast(p));
let rhs = &w * &g_wtx.insert_axis(Axis(1));
let wnew = Self::sym_decorrelation(&(lhs - rhs))?;
// `lim` let us check for convergence between the old and
// new weight values, we want their dot-product to almost equal one
let lim = *wnew
.outer_iter()
.zip(w.outer_iter())
.map(|(a, b)| a.dot(&b))
.collect::<Array1<F>>()
.mapv(|x| x.abs())
.mapv(|x| x - F::cast(1.))
.mapv(|x| x.abs())
.max()
.unwrap();
w = wnew;
if lim < F::cast(self.tol()) {
break;
}
}
Ok(w)
}
// Symmetric decorrelation
//
// W <- (W * W.T)^{-1/2} * W
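// Using the eigendecomposition W * W.T = E * diag(d) * E.T, the inverse
// square root is E * diag(d^{-1/2}) * E.T, which is what the code below
// computes (with eigenvalues lower-bounded before taking reciprocals).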
fn sym_decorrelation(w: &Array2<F>) -> Result<Array2<F>> {
let (eig_val, eig_vec) = w.dot(&w.t()).with_lapack().eigh(UPLO::Upper)?;
let eig_val = eig_val.mapv(F::cast);
let eig_vec = eig_vec.without_lapack();
let tmp = &eig_vec
* &(eig_val.mapv(|x| x.sqrt()).mapv(|x| {
// We lower bound the float value at 1e-7 when taking the reciprocal
let lower_bound = F::cast(1e-7);
if x < lower_bound {
return lower_bound.recip();
}
x.recip()
}))
.insert_axis(Axis(0));
Ok(tmp.dot(&eig_vec.t()).dot(w))
}
}
/// Fitted FastICA model for recovering the sources
#[cfg_attr(
feature = "serde",
derive(Serialize, Deserialize),
serde(crate = "serde_crate")
)]
#[derive(Debug)]
pub struct FastIca<F> {
mean: Array1<F>,
components: Array2<F>,
}
impl<F: Float> PredictInplace<Array2<F>, Array2<F>> for FastIca<F> {
/// Recover the sources
fn predict_inplace(&self, x: &Array2<F>, y: &mut Array2<F>) {
assert_eq!(
y.shape(),
&[x.nrows(), self.components.nrows()],
"The number of data points must match the number of output targets."
);
let xcentered = x - &self.mean.view().insert_axis(Axis(0));
*y = xcentered.dot(&self.components.t());
}
fn default_target(&self, x: &Array2<F>) -> Array2<F> {
Array2::zeros((x.nrows(), self.components.nrows()))
}
}
/// Some standard non-linear functions
#[cfg_attr(
feature = "serde",
derive(Serialize, Deserialize),
serde(crate = "serde_crate")
)]
#[derive(Debug)]
pub enum GFunc {
Logcosh(f64),
Exp,
Cube,
}
impl GFunc {
// Function to select the correct non-linear function and execute it
// returning a tuple, consisting of the first and second derivatives of the
// non-linear function
fn exec<A: Float>(&self, x: &Array2<A>) -> Result<(Array2<A>, Array1<A>)> {
match self {
Self::Cube => Ok(Self::cube(x)),
Self::Exp => Ok(Self::exp(x)),
Self::Logcosh(alpha) => Self::logcosh(x, *alpha),
}
}
fn cube<A: Float>(x: &Array2<A>) -> (Array2<A>, Array1<A>) {
(
x.mapv(|x| x.powi(3)),
x.mapv(|x| A::cast(3.) * x.powi(2))
.mean_axis(Axis(1))
.unwrap(),
)
}
fn exp<A: Float>(x: &Array2<A>) -> (Array2<A>, Array1<A>) {
let exp = x.mapv(|x| -x.powi(2) / A::cast(2.));
(
x * &exp,
(x.mapv(|x| A::cast(1.) - x.powi(2)) * &exp)
.mean_axis(Axis(1))
.unwrap(),
)
}
fn logcosh<A: Float>(x: &Array2<A>, alpha: f64) -> Result<(Array2<A>, Array1<A>)> {
if !(1.0..=2.0).contains(&alpha) {
return Err(FastIcaError::InvalidValue(format!(
"alpha must be between 1 and 2 inclusive, got {}",
alpha
)));
}
let alpha = A::cast(alpha);
let gx = x.mapv(|x| (x * alpha).tanh());
let g_x = gx.mapv(|x| alpha * (A::cast(1.) - x.powi(2)));
Ok((gx, g_x.mean_axis(Axis(1)).unwrap()))
}
}
#[cfg(test)]
mod tests {
use super::*;
use linfa::traits::{Fit, Predict};
use ndarray_rand::rand_distr::StudentT;
// Test to make sure the number of components set cannot be greater
// that the minimum of the number of rows and columns of the input
#[test]
fn test_ncomponents_err() {
let input = DatasetBase::from(Array::random((4, 4), Uniform::new(0.0, 1.0)));
let ica = FastIca::params().ncomponents(100);
let ica = ica.fit(&input);
assert!(ica.is_err());
}
// Test to make sure the alpha value of the `GFunc::Logcosh` is between
// 1 and 2 inclusive
#[test]
fn test_logcosh_alpha_err() {
let input = DatasetBase::from(Array::random((4, 4), Uniform::new(0.0, 1.0)));
let ica = FastIca::params().gfunc(GFunc::Logcosh(10.));
let ica = ica.fit(&input);
assert!(ica.is_err());
}
// Helper macro that produces test-cases with the pattern test_fast_ica_*
macro_rules! fast_ica_tests {
($($name:ident: $gfunc:expr,)*) => {
paste::item! {
$(
#[test]
fn [<test_fast_ica_$name>]() {
test_fast_ica($gfunc);
}
)*
}
}
}
// Tests to make sure all of the `GFunc`'s non-linear functions and the
// model itself performs well
fast_ica_tests! {
exp: GFunc::Exp, cube: GFunc::Cube, logcosh: GFunc::Logcosh(1.0),
}
// Helper function that mixes two signal sources sends it to FastICA
// and makes sure the model can demix them with considerable amount of
// accuracy
fn test_fast_ica(gfunc: GFunc) {
let nsamples = 1000;
// Center the data and make it have unit variance
let center_and_norm = |s: &mut Array2<f64>| {
let mean = s.mean_axis(Axis(0)).unwrap();
*s -= &mean.insert_axis(Axis(0));
let std = s.std_axis(Axis(0), 0.);
*s /= &std.insert_axis(Axis(0));
};
// Creating a sawtooth signal
let mut source1 = Array::linspace(0., 100., nsamples);
source1.mapv_inplace(|x| {
let tmp = 2. * f64::sin(x);
if tmp > 0. {
return 0.;
}
-1.
});
// Creating noise using Student T distribution
let mut rng = Isaac64Rng::seed_from_u64(42);
let source2 = Array::random_using((nsamples, 1), StudentT::new(1.0).unwrap(), &mut rng);
// Column concatenating both the sources
let mut sources = concatenate![Axis(1), source1.insert_axis(Axis(1)), source2];
center_and_norm(&mut sources);
// Mixing the two sources
let phi: f64 = 0.6;
let mixing = array![[phi.cos(), phi.sin()], [phi.sin(), -phi.cos()]];
sources = mixing.dot(&sources.t());
center_and_norm(&mut sources);
sources = sources.reversed_axes();
// We fit and transform using the model to unmix the two sources
let ica = FastIca::params()
.ncomponents(2)
.gfunc(gfunc)
.random_state(42);
let sources_dataset = DatasetBase::from(sources.view());
let ica = ica.fit(&sources_dataset).unwrap();
let mut output = ica.predict(&sources);
center_and_norm(&mut output);
// Making sure the model output has the right shape
assert_eq!(output.shape(), &[1000, 2]);
// The order of the sources in the ICA output is not deterministic,
// so we account for that here
let s1 = sources.column(0);
let s2 = sources.column(1);
let mut s1_ = output.column(0);
let mut s2_ = output.column(1);
if s1_.dot(&s2).abs() > s1_.dot(&s1).abs() {
s1_ = output.column(1);
s2_ = output.column(0);
}
let similarity1 = s1.dot(&s1_).abs() / (nsamples as f64);
let similarity2 = s2.dot(&s2_).abs() / (nsamples as f64);
// We make sure the saw tooth signal identified by ICA using the mixed
// source is similar to the original sawtooth signal
// We ignore the noise signal's similarity measure
assert!(similarity1.max(similarity2) > 0.9);
}
}
test_utils.py | """ Test utilities for testing
:Author: Arthur Goldberg <[email protected]>
:Date: 2019-10-31
:Copyright: 2019, Karr Lab
:License: MIT
"""
from scipy.constants import Avogadro
import os
import shutil
import tempfile
import unittest
from de_sim.simulation_config import SimulationConfig
from wc_sim.multialgorithm_simulation import MultialgorithmSimulation
from wc_sim.sim_config import WCSimulationConfig
from wc_sim.simulation import Simulation
from wc_sim.testing.make_models import MakeModel
from wc_sim.testing.utils import (check_simul_results, plot_expected_vs_simulated, get_expected_dependencies,
create_run_directory)
class TestTestingUtils(unittest.TestCase):
def setUp(self):
self.tmp_dir = tempfile.mkdtemp()
self.results_dir = tempfile.mkdtemp(dir=self.tmp_dir)
self.args = dict(results_dir=tempfile.mkdtemp(dir=self.tmp_dir),
checkpoint_period=1)
de_simulation_config = SimulationConfig(max_time=10, output_dir=tempfile.mkdtemp(dir=self.tmp_dir))
self.wc_sim_config = WCSimulationConfig(de_simulation_config, checkpoint_period=1)
def tearDown(self):
shutil.rmtree(self.tmp_dir)
def test_check_simul_results(self):
init_volume = 1E-16
init_density = 1000
molecular_weight = 100.
default_species_copy_number = 10_000
init_accounted_mass = molecular_weight * default_species_copy_number / Avogadro
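# accounted mass of the species: molecular weight (g/mol) * copy number / Avogadro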
init_accounted_density = init_accounted_mass / init_volume
expected_initial_values_compt_1 = dict(
init_volume=init_volume,
init_accounted_mass=init_accounted_mass,
init_mass=init_volume * init_density,
init_density=init_density,
init_accounted_density=init_accounted_density,
accounted_fraction=init_accounted_density / init_density
)
expected_initial_values = {'compt_1': expected_initial_values_compt_1}
model = MakeModel.make_test_model('1 species, 1 reaction',
init_vols=[expected_initial_values_compt_1['init_volume']],
init_vol_stds=[0],
density=init_density,
molecular_weight=molecular_weight,
default_species_copy_number=default_species_copy_number,
default_species_std=0,
submodel_framework='WC:deterministic_simulation_algorithm')
multialgorithm_simulation = MultialgorithmSimulation(model, self.wc_sim_config)
_, dynamic_model = multialgorithm_simulation.build_simulation()
check_simul_results(self, dynamic_model, None, expected_initial_values=expected_initial_values)
# test dynamics
simulation = Simulation(model)
results_dir = simulation.run(max_time=2, **self.args).results_dir
nan = float('NaN')
check_simul_results(self, dynamic_model, results_dir,
expected_initial_values=expected_initial_values,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[10000., 9999., 9998.]})
check_simul_results(self, dynamic_model, results_dir,
expected_initial_values=expected_initial_values,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[nan, nan, nan]})
with self.assertRaises(AssertionError):
check_simul_results(self, dynamic_model, results_dir,
expected_initial_values=expected_initial_values,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[10000., 10000., 9998.]})
with self.assertRaises(AssertionError):
check_simul_results(self, dynamic_model, results_dir,
expected_initial_values=expected_initial_values,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[10000., 10000.]})
check_simul_results(self, dynamic_model, results_dir,
expected_initial_values=expected_initial_values,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[10000., 9999., 9998.]},
rel_tol=1E-5)
check_simul_results(self, dynamic_model, results_dir,
expected_property_trajectories={'compt_1':
{'mass':[1.000e-13, 9.999e-14, 9.998e-14]}})
check_simul_results(self, dynamic_model, results_dir,
expected_property_trajectories={'compt_1':
{'mass':[nan, nan, nan]}})
with self.assertRaises(AssertionError):
check_simul_results(self, dynamic_model, results_dir,
expected_property_trajectories={'compt_1':
{'mass':[1.000e-13, 1.000e-13, 9.999e-14]}},
rel_tol=0)
plots_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'tests', 'results'))
os.makedirs(plots_dir, exist_ok=True)
plot_expected_vs_simulated(dynamic_model,
'ordinary_differential_equations',
results_dir,
trajectory_times=[0, 1, 2],
plots_dir=plots_dir,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[10000., 10000., 9998.]},
expected_property_trajectories=\
{'compt_1':
{'mass':[1.000e-13, 1.000e-13, 9.999e-14]}})
plot_expected_vs_simulated(dynamic_model,
'ordinary_differential_equations',
results_dir,
trajectory_times=[0, 1, 2],
plots_dir=plots_dir,
expected_property_trajectories=\
{'compt_1':
{'mass':[1.000e-13, 1.000e-13, 9.999e-14]}})
plot_expected_vs_simulated(dynamic_model,
'ordinary_differential_equations',
results_dir,
trajectory_times=[0, 1, 2],
plots_dir=plots_dir,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[10000., 10000., 9998.]})
plot_expected_vs_simulated(dynamic_model,
'ordinary_differential_equations',
results_dir,
trajectory_times=[0, 1, 2],
plots_dir=plots_dir,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[nan, nan, nan]},
expected_property_trajectories=\
{'compt_1':
{'mass':[nan, nan, nan]}})
def test_expected_dependencies(self):
eds = get_expected_dependencies()
self.assertEqual(eds['DynamicStopCondition']['reaction_9'], {'stop_condition_7'})
self.assertEqual(eds['DynamicStopCondition']['reaction_10'], set())
def test_create_run_directory(self):
def run_dir_test(self, run_dir):
self.assertTrue(os.path.isdir(run_dir))
os.rmdir(run_dir)
# rm the date dir if it's empty
try:
date_dir = os.path.abspath(os.path.join(run_dir, ".."))
os.rmdir(date_dir)
except OSError:
pass
run_dir_test(self, create_run_directory())
run_dir_test(self, create_run_directory(base_dir='/tmp/runs', in_repo=False))
open_archive_test.go | package zipartifacts
import (
"archive/zip"
"context"
"fmt"
"io/ioutil"
"net/http"
"net/http/httptest"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/require"
)
func TestOpenHTTPArchive(t *testing.T) {
const (
zipFile = "test.zip"
entryName = "hello.txt"
contents = "world"
testRoot = "testdata/public"
)
require.NoError(t, os.MkdirAll(testRoot, 0755))
f, err := os.Create(filepath.Join(testRoot, zipFile))
require.NoError(t, err, "create file")
defer f.Close()
zw := zip.NewWriter(f)
w, err := zw.Create(entryName)
require.NoError(t, err, "create zip entry")
_, err = fmt.Fprint(w, contents)
require.NoError(t, err, "write zip entry contents")
require.NoError(t, zw.Close(), "close zip writer")
require.NoError(t, f.Close(), "close file")
srv := httptest.NewServer(http.FileServer(http.Dir(testRoot)))
defer srv.Close()
zr, err := OpenArchive(context.Background(), srv.URL+"/"+zipFile)
require.NoError(t, err, "call OpenArchive")
require.Len(t, zr.File, 1)
zf := zr.File[0]
require.Equal(t, entryName, zf.Name, "zip entry name")
entry, err := zf.Open()
require.NoError(t, err, "get zip entry reader")
defer entry.Close()
actualContents, err := ioutil.ReadAll(entry)
require.NoError(t, err, "read zip entry contents")
require.Equal(t, contents, string(actualContents), "compare zip entry contents")
}
func TestOpenHTTPArchiveNotSendingAcceptEncodingHeader(t *testing.T) {
requestHandler := func(w http.ResponseWriter, r *http.Request) {
require.Equal(t, "GET", r.Method)
require.Nil(t, r.Header["Accept-Encoding"])
w.WriteHeader(http.StatusOK)
}
srv := httptest.NewServer(http.HandlerFunc(requestHandler))
defer srv.Close()
OpenArchive(context.Background(), srv.URL)
}
intrinsic.py | import numpy as np
import scipy.signal
from ray.rllib.policy.sample_batch import SampleBatch
from ray.rllib.evaluation.postprocessing import Postprocessing
from algorithms.curiosity import INTRINSIC_REWARD
INTRINSIC_VALUE_TARGETS = "intrinsic_value_targets"
INTRINSIC_VF_PREDS = "intrinsic_vf_preds"
def discount(x, gamma):
return scipy.signal.lfilter([1], [1, -gamma], x[::-1], axis=0)[::-1]
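# Sanity-check sketch: discount([1., 1., 1.], 0.5) -> [1.75, 1.5, 1.0], i.e.
# each entry is r_t + gamma * r_{t+1} + gamma**2 * r_{t+2} + ...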
def compute_advantages_intrinsic(rollout,
last_r,
last_intrinsic_r,
gamma=0.9,
intrinsic_gamma=0.9,
lambda_=1.0,
intrinsic_lambda_=1.0):
"""
Given a rollout, compute its value targets and the advantage. Assumes we are using separate
value function heads for the extrinsic and intrinsic rewards
Args:
rollout (SampleBatch): SampleBatch of a single trajectory
last_r (float): Extrinsic value estimate for the last observation
last_intrinsic_r (float): Intrinsic value estimate for the last observation
gamma (float): Discount factor for extrinsic rewards
intrinsic_gamma (float): Discount factor for intrinsic rewards
lambda_ (float): Parameter for GAE
intrinsic_lambda_ (float): Parameter for intrinsic GAE
Returns:
SampleBatch (SampleBatch): Object with experience from rollout and
processed rewards.
"""
traj = {}
trajsize = len(rollout[SampleBatch.ACTIONS])
for key in rollout:
traj[key] = np.stack(rollout[key])
# Extrinsic value predictions and targets
vpred_t = np.concatenate([rollout[SampleBatch.VF_PREDS], np.array([last_r])])
delta_t = (traj[SampleBatch.REWARDS] + gamma * vpred_t[1:] - vpred_t[:-1])
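# delta_t = r_t + gamma * V(s_{t+1}) - V(s_t); discounting these TD residuals
# by gamma * lambda (next line) yields the GAE(lambda) advantage estimates.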
advantages = discount(delta_t, gamma * lambda_)
traj[Postprocessing.VALUE_TARGETS] = (
advantages + traj[SampleBatch.VF_PREDS]).copy().astype(np.float32)
# Intrinsic value predictions
intrinsic_vpred_t = np.concatenate([rollout[INTRINSIC_VF_PREDS], np.array([last_intrinsic_r])])
intrinsic_delta_t = (traj[INTRINSIC_REWARD] + intrinsic_gamma * intrinsic_vpred_t[1:] - intrinsic_vpred_t[:-1])
intrinsic_advantages = discount(intrinsic_delta_t, intrinsic_gamma * intrinsic_lambda_)
traj[INTRINSIC_VALUE_TARGETS] = (
intrinsic_advantages + traj[INTRINSIC_VF_PREDS]).copy().astype(np.float32)
traj[Postprocessing.ADVANTAGES] = (advantages + intrinsic_advantages).copy().astype(np.float32)
assert all(val.shape[0] == trajsize for val in traj.values()), \
"Rollout stacked incorrectly!"
return SampleBatch(traj)
error.rs | #[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("url encoding error: {0}")] | #[error("http status code error: {0}")]
StatusCode(reqwest::StatusCode),
}
pub type Result<T> = std::result::Result<T, Error>;
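// Illustrative sketch (an assumption, not part of this module): the #[from]
// attributes let `?` convert source errors automatically, e.g.
//
//     async fn fetch(url: &str) -> Result<String> {
//         Ok(reqwest::get(url).await?.text().await?)
//     }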
payment_method_ext.rs | use serde_derive::{Deserialize, Serialize};
use crate::client::{Client, Response};
use crate::ids::{CustomerId, PaymentMethodId};
use crate::resources::PaymentMethod;
/// The parameters for `PaymentMethod::attach`
///
/// For more details see <https://stripe.com/docs/api/payment_methods/attach>.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct AttachPaymentMethod {
pub customer: CustomerId,
}
impl PaymentMethod {
/// Attach a payment method to a customer
///
/// For more details see <https://stripe.com/docs/api/payment_methods/attach>.
pub fn attach(
client: &Client,
payment_method_id: &PaymentMethodId,
params: AttachPaymentMethod,
) -> Response<PaymentMethod> {
client.post_form(&format!("/payment_methods/{}/attach", payment_method_id), params)
}
/// Detach a PaymentMethod from a Customer
///
/// For more details see <https://stripe.com/docs/api/payment_methods/detach>.
pub fn detach(client: &Client, payment_method_id: &PaymentMethodId) -> Response<PaymentMethod> {
client.post(&format!("/payment_methods/{}/detach", payment_method_id))
}
}
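// Illustrative call sketch (identifiers such as `client`, `pm_id`, and
// `customer_id` are assumptions):
//
//     let pm = PaymentMethod::attach(&client, &pm_id, AttachPaymentMethod { customer: customer_id });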
install.py | #!/usr/bin/env python2.7
# -*- coding:UTF-8 -*-
u"""install.py
Copyright (c) 2019 Yukio Kuro
This software is released under BSD license.
Installer for Linux.
"""
import os as __os
import sys as __sys
import shutil as __shutil
__shell_script = __os.path.join(__sys.exec_prefix, "games", "starseeker")
__icon = __os.path.join(__sys.exec_prefix, "share", "icons", "starseeker.png")
__source = __os.path.join(__sys.exec_prefix, "share", "games", "StarSeeker")
__desktop_entry = __os.path.join(
__sys.exec_prefix, "share", "applications", "StarSeeker.desktop")
def __install():
u"""ゲームをインストール。
"""
print "Start installation."
print "---- Set shell script ----"
startup_file = __os.path.join("linux", "startup.sh")
__os.chmod(startup_file, 0o755)
__shutil.copy(startup_file, __shell_script)
print "---- Set icon ----"
icon_file = __os.path.join("linux", "icon.png")
__os.chmod(icon_file, 0o644)
__shutil.copy(icon_file, __icon)
print "---- Set source ----"
for root, dirs, files in __os.walk("Source"):
for dir_ in dirs:
__os.chmod(__os.path.join(root, dir_), 0o755)
for file_ in files:
__os.chmod(__os.path.join(root, file_), 0o755)
if __os.path.exists(__source):
__shutil.rmtree(__source)
__shutil.copytree("Source", __source)
print "---- Set desktop entry ----"
entry_file = __os.path.join("linux", "Entry.desktop")
__os.chmod(entry_file, 0o644)
__shutil.copy(entry_file, __desktop_entry)
print "Installation is finished."
def __uninstall():
u"""ゲームをアンインストール。
| print "Start uninstallation."
print "---- Remove shell script ----"
try:
__os.remove(__shell_script)
except OSError:
print "Shell script does not exsit."
print "---- Remove icon ----"
try:
__os.remove(__icon)
except OSError:
print "Icon does not exsit."
print "---- Remove source ----"
if __os.path.exists(__source):
__shutil.rmtree(__source)
else:
print "Source does not exsit."
print "---- Remove desktop entry ----"
try:
__os.remove(__desktop_entry)
except OSError:
print "Desktop entry does not exsit."
print "Uninstallation is finished."
if __name__ == '__main__':
if 1 < len(__sys.argv) and __sys.argv[-1] == "-u":
__uninstall()
else:
__install()
Simulation_virus_BB.py | """
Bigger scale simulation of a virus spread in a city.
This would have been the better option for the project, as it uses geospatial visualisation (which is not in this code)
and data gathered from a ride share, a specific city, its population, and its public transport data.
I still don't understand how geospatial visualisation works (I think I will look more into it in the holidays).
The simulation uses mathematical equations for how a virus would spread, and includes recovery rates.
Technically this code works (I think)...
It is just missing its data and its visuals.
"""
import numpy as np
from collections import namedtuple
Param = namedtuple('Param', 'R0 DE DI I0 HospitalisationRate HospitalIters')
# I0 is the distribution of infected people at time t=0, if None then randomly choose inf number of people
# flow is a 3D matrix of dimensions r x n x n (i.e., 84 x 549 x 549),
# flow[t mod r] is the desired OD matrix at time t.
def seir(par, distr, flow, alpha, iterations, inf):
r = flow.shape[0]
n = flow.shape[1]
N = distr[0].sum() #total population, we assume that N = sum(flow)
Svec = distr[0].copy()
Evec = np.zeros(n)
Ivec = np.zeros(n)
Rvec = np.zeros(n)
if par.I0 is None:
initial = np.zeros(n)
# randomly choose inf infections
for i in range(inf):
loc = np.random.randint(n)
if (Svec[loc] > initial[loc]):
initial[loc] += 1.0
else:
initial = par.I0
assert ((Svec < initial).sum() == 0)
Svec -= initial
Ivec += initial
res = np.zeros((iterations, 5))
res[0,:] = [Svec.sum(), Evec.sum(), Ivec.sum(), Rvec.sum(), 0]
realflow = flow.copy()
realflow = realflow / realflow.sum(axis=2)[:,:, np.newaxis]
realflow = alpha * realflow
history = np.zeros((iterations, 5, n))
history[0,0,:] = Svec
history[0,1,:] = Evec
history[0,2,:] = Ivec
history[0,3,:] = Rvec
eachIter = np.zeros(iterations + 1)
# run simulation
for iter in range(0, iterations - 1):
realOD = realflow[iter % r]
d = distr[iter % r] + 1
if ((d>N+1).any()):
print("Houston, we have a problem!")
return res, history
# N = S + E + I + R
newE = Svec * Ivec / d * (par.R0 / par.DI)
newI = Evec / par.DE
newR = Ivec / par.DI
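# A sketch of the discrete SEIR step being applied (reading DE and DI as the
# mean incubation and infectious periods, in iterations):
# new exposures = S * I / N * (R0 / DI), incubations = E / DE, recoveries = I / DI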
Svec -= newE
Svec = (Svec + np.matmul(Svec.reshape(1,n), realOD) - Svec * realOD.sum(axis=1))
Evec = Evec + newE - newI
Evec = (Evec + np.matmul(Evec.reshape(1,n), realOD) - Evec * realOD.sum(axis=1))
Ivec = Ivec + newI - newR
Ivec = (Ivec + np.matmul(Ivec.reshape(1,n), realOD) - Ivec * realOD.sum(axis=1))
Rvec += newR
Rvec = (Rvec + np.matmul(Rvec.reshape(1,n), realOD) - Rvec * realOD.sum(axis=1))
res[iter + 1,:] = [Svec.sum(), Evec.sum(), Ivec.sum(), Rvec.sum(), 0]
eachIter[iter + 1] = newI.sum()
res[iter + 1, 4] = eachIter[max(0, iter - par.HospitalIters) : iter].sum() * par.HospitalisationRate
history[iter + 1,0,:] = Svec
history[iter + 1,1,:] = Evec
history[iter + 1,2,:] = Ivec
history[iter + 1,3,:] = Rvec
return res, history
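# Illustrative invocation sketch (parameter values below are assumptions, not
# project data):
# par = Param(R0=2.4, DE=5.6, DI=5.2, I0=None,
#             HospitalisationRate=0.1, HospitalIters=15)
# res, history = seir(par, distr, flow, alpha=0.9, iterations=100, inf=50)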
pathtool.py | import os
def find_base(p, bases):
for base_name, base_path in bases.items():
r = os.path.relpath(p, base_path)
if r and (r == '.' or r[0] != '.'):
return base_name, r
return None
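# Example (hypothetical paths): with bases = {"home": "/home/user"},
# find_base("/home/user/docs/a.txt", bases) returns ("home", "docs/a.txt");
# a path under none of the bases returns None.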
popularity_predictor.py | import pandas as pd
import subprocess, os
import src.utils.loader as loader
def create_test_arff(participant, test_df, aux_path):
arff_text = "@relation summary_features \n\n" \
"@attribute n_faces numeric\n" \
"@attribute avg_confidence_faces numeric\n" \
"@attribute std_confidence_faces numeric\n" \
"@attribute avg_relativeSize_faces numeric\n" \
"@attribute std_relativeSize_faces numeric\n" \
"@attribute avg_thirdRule_x numeric\n" \
"@attribute std_thirdRule_x numeric\n" \
"@attribute avg_thirdRule_y numeric\n" \
"@attribute std_thirdRule_y numeric\n" \
"@attribute num_clts numeric\n" \
"@attribute avg_silhouette numeric\n" \
"@attribute avg_intra_clt_dist numeric\n" \
"@attribute avg_inter_clt_dist numeric\n" \
"@attribute faces_in_noise_clt numeric\n" \
"@attribute num_core_samples numeric\n" \
"@attribute avg_imgs_clt numeric\n" \
"@attribute avg_std_silhouette numeric\n" \
"@attribute avg_std_intra_clt_dist numeric\n" \
"@attribute avg_std_inter_clt_dist numeric\n" \
"@attribute avg_n_core_samples numeric\n" \
"@attribute std_n_core_samples numeric\n" \
"@attribute GTrends_popularity numeric\n" \
"@attribute label {1,0}\n\n" \
"@data\n"
data = test_df.loc[test_df["id"]==participant]
data = data.drop(columns="id")
data_str = ""
for ele in data.values[0]:
data_str += str(ele)+","
data_str = data_str[0:-3]
arff_text+=data_str
print(arff_text)
f = open(aux_path, "w")
f.write(arff_text)
f.close()
def evaluate_test_arff(model_path, test_arff_path, out_path):
|
def remove_lines(path_csv):
with open(path_csv, 'r') as fin:
data = fin.read().splitlines(True)
with open(path_csv, 'w') as fout:
fout.writelines(data[4:])  # the first 4 lines are the headers
fout.close()
if __name__ == "__main__":
th = "05"
path_model = "../../data/models/popularity_module/CLASIF/th"+th+"/RandomForest.model"
complete_df_ids = "../../data/datasets/popularity_module_features/train/summary_features_participants_classification_th"+th+".csv"
aux_path = "../../data/datasets/popularity_module_features/aux_test.arff"
out_path_prediction = "../../data/datasets/popularity_module_features/aux_prediction.csv"
complete_df = pd.read_csv(complete_df_ids, header=0, sep=",")
bash_test_model = ""
path_participants = "../../data/datasets/DATASET_GOOGLE_IMGS/participants/"
list_participants = loader.load_list_of_tertulianos(path_participants, "participants_complete_rtve2018",".csv")
#list_participants = [participant.replace(" ", "_") for participant in part]
df_popularity = pd.DataFrame([], columns=["prediction", "popular", "id"])
out_path_popularity_df = "../../data/results/popularity_models_output/popularity_df_th"+th+".csv"
for participant in list_participants:
participant = participant.replace("_", " ")
create_test_arff(participant, complete_df, aux_path)
df_participant = evaluate_test_arff(path_model, aux_path, out_path_prediction)
df_popularity = df_popularity.append(pd.DataFrame([[df_participant["predicted"][0].split(":")[-1], df_participant["predicted"][0].split(":")[-1]=="1", participant
]], columns=["prediction", "popular", "id"]))
df_popularity.to_csv(out_path_popularity_df, sep=";", header=True, index=False)
value.rs | use std::{
collections::HashMap,
mem,
panic::{catch_unwind, UnwindSafe},
};
use bytes::{Buf, BufMut, Bytes, BytesMut};
use chrono::{
DateTime, FixedOffset, NaiveDate, NaiveDateTime, NaiveTime, Offset, TimeZone, Timelike,
};
use chrono_tz::Tz;
pub use duration::Duration;
pub use node::Node;
pub use path::Path;
pub use point_2d::Point2D;
pub use point_3d::Point3D;
pub use relationship::Relationship;
pub use unbound_relationship::UnboundRelationship;
use crate::error::*;
use crate::serialization::*;
pub(crate) mod conversions;
pub(crate) mod duration;
pub(crate) mod node;
pub(crate) mod path;
pub(crate) mod point_2d;
pub(crate) mod point_3d;
pub(crate) mod relationship;
pub(crate) mod unbound_relationship;
pub(crate) const MARKER_FALSE: u8 = 0xC2;
pub(crate) const MARKER_TRUE: u8 = 0xC3;
pub(crate) const MARKER_INT_8: u8 = 0xC8;
pub(crate) const MARKER_INT_16: u8 = 0xC9;
pub(crate) const MARKER_INT_32: u8 = 0xCA;
pub(crate) const MARKER_INT_64: u8 = 0xCB;
pub(crate) const MARKER_FLOAT: u8 = 0xC1;
pub(crate) const MARKER_SMALL_BYTES: u8 = 0xCC;
pub(crate) const MARKER_MEDIUM_BYTES: u8 = 0xCD;
pub(crate) const MARKER_LARGE_BYTES: u8 = 0xCE;
pub(crate) const MARKER_TINY_LIST: u8 = 0x90;
pub(crate) const MARKER_SMALL_LIST: u8 = 0xD4;
pub(crate) const MARKER_MEDIUM_LIST: u8 = 0xD5;
pub(crate) const MARKER_LARGE_LIST: u8 = 0xD6;
pub(crate) const MARKER_TINY_MAP: u8 = 0xA0;
pub(crate) const MARKER_SMALL_MAP: u8 = 0xD8;
pub(crate) const MARKER_MEDIUM_MAP: u8 = 0xD9;
pub(crate) const MARKER_LARGE_MAP: u8 = 0xDA;
pub(crate) const MARKER_NULL: u8 = 0xC0;
pub(crate) const MARKER_TINY_STRING: u8 = 0x80;
pub(crate) const MARKER_SMALL_STRING: u8 = 0xD0;
pub(crate) const MARKER_MEDIUM_STRING: u8 = 0xD1;
pub(crate) const MARKER_LARGE_STRING: u8 = 0xD2;
pub(crate) const MARKER_TINY_STRUCT: u8 = 0xB0;
pub(crate) const MARKER_SMALL_STRUCT: u8 = 0xDC;
pub(crate) const MARKER_MEDIUM_STRUCT: u8 = 0xDD;
pub(crate) const SIGNATURE_NODE: u8 = 0x4E;
pub(crate) const SIGNATURE_RELATIONSHIP: u8 = 0x52;
pub(crate) const SIGNATURE_PATH: u8 = 0x50;
pub(crate) const SIGNATURE_UNBOUND_RELATIONSHIP: u8 = 0x72;
pub(crate) const SIGNATURE_DATE: u8 = 0x44;
pub(crate) const SIGNATURE_TIME: u8 = 0x54;
pub(crate) const SIGNATURE_DATE_TIME_OFFSET: u8 = 0x46;
pub(crate) const SIGNATURE_DATE_TIME_ZONED: u8 = 0x66;
pub(crate) const SIGNATURE_LOCAL_TIME: u8 = 0x74;
pub(crate) const SIGNATURE_LOCAL_DATE_TIME: u8 = 0x64;
pub(crate) const SIGNATURE_DURATION: u8 = 0x45;
pub(crate) const SIGNATURE_POINT_2D: u8 = 0x58;
pub(crate) const SIGNATURE_POINT_3D: u8 = 0x59;
/// An enum that can hold values of all Bolt-compatible types.
///
/// Conversions are provided for most types, and are usually pretty intuitive ([`bool`] to
/// [`Value::Boolean`], [`i32`] to [`Value::Integer`], [`HashMap`](std::collections::HashMap) to
/// [`Value::Map`], etc.), but some types have no analog in Rust, like a timezone-aware time. For
/// such types, conversions are still provided, but may feel a bit clunky (for example, you can
/// convert a `(`[`NaiveTime`](chrono::NaiveTime)`, impl `[`Offset`](chrono::Offset)`)` tuple into
/// a [`Value::Time`]).
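/// For example (an illustrative sketch; the exact `From` impls live in the
/// `conversions` module): `Value::from((NaiveTime::from_hms(16, 0, 0), Utc.fix()))`
/// would yield a `Value::Time`.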
// TODO: Implement serde traits
#[derive(Debug, Clone, PartialEq)]
pub enum Value {
// V1-compatible value types
Boolean(bool),
Integer(i64),
Float(f64),
Bytes(Vec<u8>),
List(Vec<Value>),
Map(HashMap<String, Value>),
Null,
String(String),
Node(Node),
Relationship(Relationship),
Path(Path),
UnboundRelationship(UnboundRelationship),
// V2+-compatible value types
Date(NaiveDate), // A date without a time zone, a.k.a. LocalDate
Time(NaiveTime, FixedOffset), // A time with a UTC offset, a.k.a. OffsetTime
DateTimeOffset(DateTime<FixedOffset>), // A date-time with a UTC offset, a.k.a. OffsetDateTime
DateTimeZoned(DateTime<Tz>), // A date-time with a time zone ID, a.k.a. ZonedDateTime
LocalTime(NaiveTime), // A time without a time zone
LocalDateTime(NaiveDateTime), // A date-time without a time zone
Duration(Duration),
Point2D(Point2D),
Point3D(Point3D),
}
impl Eq for Value {
fn assert_receiver_is_total_eq(&self) {
if let Value::Float(_) | Value::Point2D(_) | Value::Point3D(_) = self {
panic!("{:?} does not impl Eq", self)
}
}
}
impl BoltValue for Value {
fn marker(&self) -> SerializeResult<u8> |
fn serialize(self) -> SerializeResult<Bytes> {
let marker = self.marker()?;
match self {
Value::Boolean(true) => Ok(Bytes::from_static(&[MARKER_TRUE])),
Value::Boolean(false) => Ok(Bytes::from_static(&[MARKER_FALSE])),
Value::Integer(integer) => {
// Worst case is marker + 64-bit int
let mut bytes =
BytesMut::with_capacity(mem::size_of::<u8>() + mem::size_of::<i64>());
bytes.put_u8(marker);
match integer {
-9_223_372_036_854_775_808..=-2_147_483_649
| 2_147_483_648..=9_223_372_036_854_775_807 => {
bytes.put_i64(integer);
}
-2_147_483_648..=-32_769 | 32_768..=2_147_483_647 => {
bytes.put_i32(integer as i32);
}
-32_768..=-129 | 128..=32_767 => {
bytes.put_i16(integer as i16);
}
-128..=-17 => {
bytes.put_i8(integer as i8);
}
-16..=127 => {} // The marker is the value
}
Ok(bytes.freeze())
}
Value::Float(f) => {
let mut bytes =
BytesMut::with_capacity(mem::size_of::<u8>() + mem::size_of::<f64>());
bytes.put_u8(marker);
bytes.put_f64(f);
Ok(bytes.freeze())
}
Value::Bytes(bytes) => {
// Worst case is a large ByteArray, with marker byte, 32-bit size value, and length
let mut buf = BytesMut::with_capacity(
mem::size_of::<u8>() + mem::size_of::<u32>() + bytes.len(),
);
buf.put_u8(marker);
match bytes.len() {
0..=255 => buf.put_u8(bytes.len() as u8),
256..=65_535 => buf.put_u16(bytes.len() as u16),
65_536..=2_147_483_647 => buf.put_u32(bytes.len() as u32),
_ => return Err(SerializationError::ValueTooLarge(bytes.len())),
}
buf.put_slice(&bytes);
Ok(buf.freeze())
}
Value::List(list) => {
let length = list.len();
let mut total_value_bytes: usize = 0;
let mut value_bytes_vec: Vec<Bytes> = Vec::with_capacity(length);
for value in list {
let value_bytes = value.serialize()?;
total_value_bytes += value_bytes.len();
value_bytes_vec.push(value_bytes);
}
// Worst case is a large List, with marker byte, 32-bit size value, and all the
// Value bytes
let mut bytes = BytesMut::with_capacity(
mem::size_of::<u8>() + mem::size_of::<u32>() + total_value_bytes,
);
bytes.put_u8(marker);
match length {
0..=15 => {} // The marker contains the length
16..=255 => bytes.put_u8(length as u8),
256..=65_535 => bytes.put_u16(length as u16),
65_536..=4_294_967_295 => bytes.put_u32(length as u32),
_ => return Err(SerializationError::ValueTooLarge(length)),
}
for value_bytes in value_bytes_vec {
bytes.put(value_bytes);
}
Ok(bytes.freeze())
}
Value::Map(map) => {
let length = map.len();
let mut total_value_bytes: usize = 0;
let mut value_bytes_vec: Vec<Bytes> = Vec::with_capacity(length);
for (key, val) in map {
let key_bytes: Bytes = Value::String(key).serialize()?;
let val_bytes: Bytes = val.serialize()?;
total_value_bytes += key_bytes.len() + val_bytes.len();
value_bytes_vec.push(key_bytes);
value_bytes_vec.push(val_bytes);
}
// Worst case is a large Map, with marker byte, 32-bit size value, and all the
// Value bytes
let mut bytes = BytesMut::with_capacity(
mem::size_of::<u8>() + mem::size_of::<u32>() + total_value_bytes,
);
bytes.put_u8(marker);
match length {
0..=15 => {} // The marker contains the length
16..=255 => bytes.put_u8(length as u8),
256..=65_535 => bytes.put_u16(length as u16),
65_536..=4_294_967_295 => bytes.put_u32(length as u32),
_ => return Err(SerializationError::ValueTooLarge(length)),
}
for value_bytes in value_bytes_vec {
bytes.put(value_bytes);
}
Ok(bytes.freeze())
}
Value::Null => Ok(Bytes::from_static(&[MARKER_NULL])),
Value::String(string) => {
let length = string.len();
// Worst case is a large string, with marker byte, 32-bit size value, and length
let mut bytes =
BytesMut::with_capacity(mem::size_of::<u8>() + mem::size_of::<u32>() + length);
bytes.put_u8(marker);
match length {
0..=15 => {} // The marker contains the length
16..=255 => bytes.put_u8(length as u8),
256..=65_535 => bytes.put_u16(length as u16),
65_536..=4_294_967_295 => bytes.put_u32(length as u32),
_ => return Err(SerializationError::ValueTooLarge(length)),
}
bytes.put(string.as_bytes());
Ok(bytes.freeze())
}
Value::Node(node) => node.serialize(),
Value::Relationship(rel) => rel.serialize(),
Value::Path(path) => path.serialize(),
Value::UnboundRelationship(unbound_rel) => unbound_rel.serialize(),
Value::Date(date) => Ok(vec![marker, SIGNATURE_DATE]
.into_iter()
.chain(
// Days since UNIX epoch
Value::from((date - NaiveDate::from_ymd(1970, 1, 1)).num_days()).serialize()?,
)
.collect()),
Value::Time(time, offset) => Ok(vec![marker, SIGNATURE_TIME]
.into_iter()
.chain(
// Nanoseconds since midnight
// Will not overflow: u32::MAX * 1_000_000_000 + u32::MAX < i64::MAX
Value::from(
time.num_seconds_from_midnight() as i64 * 1_000_000_000
+ time.nanosecond() as i64,
)
.serialize()?,
)
.chain(
// Timezone offset
Value::from(offset.fix().local_minus_utc()).serialize()?,
)
.collect()),
Value::DateTimeOffset(date_time_offset) => Ok(vec![marker, SIGNATURE_DATE_TIME_OFFSET]
.into_iter()
.chain(
// Seconds since UNIX epoch
Value::from(date_time_offset.timestamp()).serialize()?,
)
.chain(
// Nanoseconds
Value::from(date_time_offset.nanosecond() as i64).serialize()?,
)
.chain(
// Timezone offset
Value::from(date_time_offset.offset().fix().local_minus_utc()).serialize()?,
)
.collect()),
Value::DateTimeZoned(date_time_zoned) => {
Ok(vec![marker, SIGNATURE_DATE_TIME_ZONED]
.into_iter()
// Seconds since UNIX epoch
.chain(Value::from(date_time_zoned.timestamp()).serialize()?)
// Nanoseconds
.chain(Value::from(date_time_zoned.nanosecond() as i64).serialize()?)
// Timezone ID
.chain(Value::from(date_time_zoned.timezone().name().to_string()).serialize()?)
.collect())
}
Value::LocalTime(local_time) => Ok(vec![marker, SIGNATURE_LOCAL_TIME]
.into_iter()
.chain(
Value::from(
// Will not overflow: u32::MAX * 1_000_000_000 + u32::MAX < i64::MAX
local_time.num_seconds_from_midnight() as i64 * 1_000_000_000
+ local_time.nanosecond() as i64,
)
.serialize()?,
)
.collect()),
Value::LocalDateTime(local_date_time) => Ok(vec![marker, SIGNATURE_LOCAL_DATE_TIME]
.into_iter()
// Seconds since UNIX epoch
.chain(Value::from(local_date_time.timestamp()).serialize()?)
// Nanoseconds
.chain(Value::from(local_date_time.nanosecond() as i64).serialize()?)
.collect()),
Value::Duration(duration) => duration.serialize(),
Value::Point2D(point_2d) => point_2d.serialize(),
Value::Point3D(point_3d) => point_3d.serialize(),
}
}
fn deserialize<B: Buf + UnwindSafe>(mut bytes: B) -> DeserializeResult<(Self, B)> {
catch_unwind(move || {
let marker = bytes.get_u8();
match marker {
// Boolean
MARKER_TRUE => Ok((Value::Boolean(true), bytes)),
MARKER_FALSE => Ok((Value::Boolean(false), bytes)),
// Tiny int
marker if (-16..=127).contains(&(marker as i8)) => {
Ok((Value::Integer(marker as i8 as i64), bytes))
}
// Other int types
MARKER_INT_8 => Ok((Value::Integer(bytes.get_i8() as i64), bytes)),
MARKER_INT_16 => Ok((Value::Integer(bytes.get_i16() as i64), bytes)),
MARKER_INT_32 => Ok((Value::Integer(bytes.get_i32() as i64), bytes)),
MARKER_INT_64 => Ok((Value::Integer(bytes.get_i64()), bytes)),
// Float
MARKER_FLOAT => Ok((Value::Float(bytes.get_f64()), bytes)),
// Byte array
MARKER_SMALL_BYTES | MARKER_MEDIUM_BYTES | MARKER_LARGE_BYTES => {
let size = match marker {
MARKER_SMALL_BYTES => bytes.get_u8() as usize,
MARKER_MEDIUM_BYTES => bytes.get_u16() as usize,
MARKER_LARGE_BYTES => bytes.get_u32() as usize,
_ => unreachable!(),
};
Ok((Value::Bytes(bytes.copy_to_bytes(size).to_vec()), bytes))
}
// List
marker
if (MARKER_TINY_LIST..=(MARKER_TINY_LIST | 0x0F)).contains(&marker)
|| matches!(
marker,
MARKER_SMALL_LIST | MARKER_MEDIUM_LIST | MARKER_LARGE_LIST
) =>
{
let size = match marker {
marker
if (MARKER_TINY_LIST..=(MARKER_TINY_LIST | 0x0F)).contains(&marker) =>
{
0x0F & marker as usize
}
MARKER_SMALL_LIST => bytes.get_u8() as usize,
MARKER_MEDIUM_LIST => bytes.get_u16() as usize,
MARKER_LARGE_LIST => bytes.get_u32() as usize,
_ => unreachable!(),
};
let mut list: Vec<Value> = Vec::with_capacity(size);
for _ in 0..size {
let (v, b) = Value::deserialize(bytes)?;
bytes = b;
list.push(v);
}
Ok((Value::List(list), bytes))
}
// Map
marker
if (MARKER_TINY_MAP..=(MARKER_TINY_MAP | 0x0F)).contains(&marker)
|| matches!(
marker,
MARKER_SMALL_MAP | MARKER_MEDIUM_MAP | MARKER_LARGE_MAP
) =>
{
let size = match marker {
marker
if (MARKER_TINY_MAP..=(MARKER_TINY_MAP | 0x0F)).contains(&marker) =>
{
0x0F & marker as usize
}
MARKER_SMALL_MAP => bytes.get_u8() as usize,
MARKER_MEDIUM_MAP => bytes.get_u16() as usize,
MARKER_LARGE_MAP => bytes.get_u32() as usize,
_ => unreachable!(),
};
let mut hash_map: HashMap<std::string::String, Value> =
HashMap::with_capacity(size);
for _ in 0..size {
let (value, remaining) = Value::deserialize(bytes)?;
bytes = remaining;
match value {
Value::String(key) => {
let (value, remaining) = Value::deserialize(bytes)?;
bytes = remaining;
hash_map.insert(key, value);
}
other => return Err(ConversionError::FromValue(other).into()),
}
}
Ok((Value::Map(hash_map), bytes))
}
// Null
MARKER_NULL => Ok((Value::Null, bytes)),
// String
marker
if (MARKER_TINY_STRING..=(MARKER_TINY_STRING | 0x0F)).contains(&marker)
|| matches!(
marker,
MARKER_SMALL_STRING | MARKER_MEDIUM_STRING | MARKER_LARGE_STRING
) =>
{
let size = match marker {
marker
if (MARKER_TINY_STRING..=(MARKER_TINY_STRING | 0x0F))
.contains(&marker) =>
{
0x0F & marker as usize
}
MARKER_SMALL_STRING => bytes.get_u8() as usize,
MARKER_MEDIUM_STRING => bytes.get_u16() as usize,
MARKER_LARGE_STRING => bytes.get_u32() as usize,
_ => unreachable!(),
};
Ok((
Value::String(String::from_utf8(bytes.copy_to_bytes(size).to_vec())?),
bytes,
))
}
// Structure
marker
if (MARKER_TINY_STRUCT..=(MARKER_TINY_STRUCT | 0x0F)).contains(&marker)
|| matches!(marker, MARKER_SMALL_STRUCT | MARKER_MEDIUM_STRUCT) =>
{
deserialize_structure(marker, bytes)
}
_ => Err(DeserializationError::InvalidMarkerByte(marker)),
}
})
.map_err(|_| DeserializationError::Panicked)?
}
}
macro_rules! deserialize_struct {
($name:ident, $bytes:ident) => {{
let (value, remaining) = $name::deserialize($bytes)?;
$bytes = remaining;
Ok((Value::$name(value), $bytes))
}};
}
macro_rules! deserialize_variant {
($name:ident, $bytes:ident) => {{
let (value, remaining) = Value::deserialize($bytes)?;
$bytes = remaining;
if let Value::$name(inner) = value {
inner
} else {
return Err(ConversionError::FromValue(value).into());
}
}};
}
fn deserialize_structure<B: Buf + UnwindSafe>(
marker: u8,
mut bytes: B,
) -> DeserializeResult<(Value, B)> {
let (_, signature) = get_structure_info(marker, &mut bytes)?;
match signature {
SIGNATURE_NODE => deserialize_struct!(Node, bytes),
SIGNATURE_RELATIONSHIP => deserialize_struct!(Relationship, bytes),
SIGNATURE_PATH => deserialize_struct!(Path, bytes),
SIGNATURE_UNBOUND_RELATIONSHIP => deserialize_struct!(UnboundRelationship, bytes),
SIGNATURE_DATE => {
let days_since_epoch: i64 = deserialize_variant!(Integer, bytes);
Ok((
Value::Date(
NaiveDate::from_ymd(1970, 1, 1) + chrono::Duration::days(days_since_epoch),
),
bytes,
))
}
SIGNATURE_TIME => {
let nanos_since_midnight: i64 = deserialize_variant!(Integer, bytes);
let zone_offset: i32 = deserialize_variant!(Integer, bytes) as i32;
Ok((
Value::Time(
NaiveTime::from_num_seconds_from_midnight(
(nanos_since_midnight / 1_000_000_000) as u32,
(nanos_since_midnight % 1_000_000_000) as u32,
),
FixedOffset::east(zone_offset),
),
bytes,
))
}
SIGNATURE_DATE_TIME_OFFSET => {
let epoch_seconds: i64 = deserialize_variant!(Integer, bytes);
let nanos: i64 = deserialize_variant!(Integer, bytes);
let offset_seconds: i32 = deserialize_variant!(Integer, bytes) as i32;
Ok((
Value::DateTimeOffset(DateTime::from_utc(
NaiveDateTime::from_timestamp(epoch_seconds, nanos as u32),
FixedOffset::east(offset_seconds),
)),
bytes,
))
}
SIGNATURE_DATE_TIME_ZONED => {
let epoch_seconds: i64 = deserialize_variant!(Integer, bytes);
let nanos: i64 = deserialize_variant!(Integer, bytes);
let timezone_id: String = deserialize_variant!(String, bytes);
let timezone: Tz = timezone_id.parse().unwrap();
Ok((
Value::DateTimeZoned(timezone.timestamp(epoch_seconds, nanos as u32)),
bytes,
))
}
SIGNATURE_LOCAL_TIME => {
let nanos_since_midnight: i64 = deserialize_variant!(Integer, bytes);
Ok((
Value::LocalTime(NaiveTime::from_num_seconds_from_midnight(
(nanos_since_midnight / 1_000_000_000) as u32,
(nanos_since_midnight % 1_000_000_000) as u32,
)),
bytes,
))
}
SIGNATURE_LOCAL_DATE_TIME => {
let epoch_seconds: i64 = deserialize_variant!(Integer, bytes);
let nanos: i64 = deserialize_variant!(Integer, bytes);
Ok((
Value::LocalDateTime(NaiveDateTime::from_timestamp(epoch_seconds, nanos as u32)),
bytes,
))
}
SIGNATURE_DURATION => deserialize_struct!(Duration, bytes),
SIGNATURE_POINT_2D => deserialize_struct!(Point2D, bytes),
SIGNATURE_POINT_3D => deserialize_struct!(Point3D, bytes),
_ => Err(DeserializationError::InvalidSignatureByte(signature)),
}
}
#[cfg(test)]
mod tests {
use std::{collections::HashMap, iter::FromIterator};
use chrono::{FixedOffset, NaiveDate, NaiveTime, TimeZone, Utc};
use super::*;
macro_rules! value_test {
($name:ident, $value:expr, $marker:expr, $($bytes:expr),+) => {
#[test]
fn $name() {
let value = $value;
let bytes: Bytes = vec![$marker]
.into_iter()
$(.chain($bytes.iter().copied()))*
.collect();
assert_eq!(value.marker().unwrap(), $marker);
assert_eq!(value.clone().serialize().unwrap(), &bytes);
let (deserialized, remaining) = Value::deserialize(bytes).unwrap();
assert_eq!(deserialized, value);
assert_eq!(remaining.len(), 0);
}
};
($name:ident, $value:expr, $marker:expr) => {
#[test]
fn $name() {
let value = $value;
let bytes = $value.clone().serialize().unwrap();
assert_eq!(value.marker().unwrap(), $marker);
let (deserialized, remaining) = Value::deserialize(bytes).unwrap();
assert_eq!(deserialized, value);
assert_eq!(remaining.len(), 0);
}
};
}
value_test!(null, Value::Null, MARKER_NULL, &[]);
value_test!(bool_true, Value::Boolean(true), MARKER_TRUE, &[]);
value_test!(bool_false, Value::Boolean(false), MARKER_FALSE, &[]);
value_test!(tiny_int, Value::Integer(110), 110, &[]);
value_test!(
small_int,
Value::Integer(-127),
MARKER_INT_8,
(-127_i8).to_be_bytes()
);
value_test!(
medium_int,
Value::Integer(8000),
MARKER_INT_16,
8000_i16.to_be_bytes()
);
value_test!(
medium_negative_int,
Value::Integer(-18621),
MARKER_INT_16,
(-18621_i16).to_be_bytes()
);
value_test!(
large_int,
Value::Integer(-1_000_000_000),
MARKER_INT_32,
(-1_000_000_000_i32).to_be_bytes()
);
value_test!(
very_large_int,
Value::Integer(9_000_000_000_000_000_000),
MARKER_INT_64,
9_000_000_000_000_000_000_i64.to_be_bytes()
);
value_test!(
float_min,
Value::Float(f64::MIN_POSITIVE),
MARKER_FLOAT,
f64::MIN_POSITIVE.to_be_bytes()
);
value_test!(
float_max,
Value::Float(f64::MAX),
MARKER_FLOAT,
f64::MAX.to_be_bytes()
);
value_test!(
float_e,
Value::Float(std::f64::consts::E),
MARKER_FLOAT,
std::f64::consts::E.to_be_bytes()
);
value_test!(
float_pi,
Value::Float(std::f64::consts::PI),
MARKER_FLOAT,
std::f64::consts::PI.to_be_bytes()
);
value_test!(
empty_byte_array,
Value::Bytes(vec![]),
MARKER_SMALL_BYTES,
&[0]
);
value_test!(
small_byte_array,
Value::Bytes(vec![1_u8; 100]),
MARKER_SMALL_BYTES,
&[100],
&[1_u8; 100]
);
value_test!(
medium_byte_array,
Value::Bytes(vec![99_u8; 1000]),
MARKER_MEDIUM_BYTES,
1000_u16.to_be_bytes(),
&[99_u8; 1000]
);
value_test!(
large_byte_array,
Value::Bytes(vec![1_u8; 100_000]),
MARKER_LARGE_BYTES,
100_000_u32.to_be_bytes(),
&[1_u8; 100_000]
);
value_test!(empty_list, Value::List(vec![]), MARKER_TINY_LIST | 0, &[]);
value_test!(
tiny_list,
Value::List(vec![Value::Integer(100_000); 3]),
MARKER_TINY_LIST | 3,
&[MARKER_INT_32],
100_000_u32.to_be_bytes(),
&[MARKER_INT_32],
100_000_u32.to_be_bytes(),
&[MARKER_INT_32],
100_000_u32.to_be_bytes()
);
value_test!(
small_list,
Value::List(vec![Value::String(String::from("item")); 100]),
MARKER_SMALL_LIST,
&[100],
&[MARKER_TINY_STRING | 4, b'i', b't', b'e', b'm'].repeat(100)
);
value_test!(
medium_list,
Value::List(vec![Value::Boolean(false); 1000]),
MARKER_MEDIUM_LIST,
1000_u16.to_be_bytes(),
&[MARKER_FALSE; 1000]
);
value_test!(
large_list,
Value::List(vec![Value::Integer(1); 70_000]),
MARKER_LARGE_LIST,
70_000_u32.to_be_bytes(),
&[1; 70_000]
);
value_test!(
tiny_string,
Value::String(String::from("string")),
MARKER_TINY_STRING | 6,
b"string"
);
value_test!(
small_string,
Value::String(String::from("string".repeat(10))),
MARKER_SMALL_STRING,
60_u8.to_be_bytes(),
b"string".repeat(10)
);
value_test!(
medium_string,
Value::String(String::from("string".repeat(1000))),
MARKER_MEDIUM_STRING,
6000_u16.to_be_bytes(),
b"string".repeat(1000)
);
value_test!(
large_string,
Value::String(String::from("string".repeat(100_000))),
MARKER_LARGE_STRING,
600_000_u32.to_be_bytes(),
b"string".repeat(100_000)
);
value_test!(
special_string,
Value::String(String::from("En å flöt över ängen")),
MARKER_SMALL_STRING,
24_u8.to_be_bytes(),
"En å flöt över ängen".bytes().collect::<Vec<_>>()
);
value_test!(
empty_map,
Value::from(HashMap::<&str, i8>::new()),
MARKER_TINY_MAP | 0,
&[]
);
value_test!(
tiny_map,
Value::from(HashMap::<&str, i8>::from_iter(vec![("a", 1_i8)])),
MARKER_TINY_MAP | 1,
&[MARKER_TINY_STRING | 1, b'a', 1]
);
#[test]
fn small_map_from_bytes() {
let small_map = Value::from(HashMap::<&str, i8>::from_iter(vec![
("a", 1_i8),
("b", 1_i8),
("c", 3_i8),
("d", 4_i8),
("e", 5_i8),
("f", 6_i8),
("g", 7_i8),
("h", 8_i8),
("i", 9_i8),
("j", 0_i8),
("k", 1_i8),
("l", 2_i8),
("m", 3_i8),
("n", 4_i8),
("o", 5_i8),
("p", 6_i8),
]));
let bytes = small_map.clone().serialize().unwrap();
let (deserialized, remaining) = Value::deserialize(bytes).unwrap();
assert_eq!(deserialized, small_map);
assert_eq!(remaining.len(), 0);
}
value_test!(
node,
Value::Node(Node::new(
24_i64,
vec!["TestNode".to_string()],
HashMap::from_iter(vec![
("key1".to_string(), -1_i8),
("key2".to_string(), 1_i8),
]),
)),
MARKER_TINY_STRUCT | 3
);
value_test!(
relationship,
Value::Relationship(Relationship::new(
24_i64,
32_i64,
128_i64,
"TestRel".to_string(),
HashMap::from_iter(vec![
("key1".to_string(), -2_i8),
("key2".to_string(), 2_i8),
]),
)),
MARKER_TINY_STRUCT | 5
);
value_test!(
path,
Value::Path(Path::new(
vec![Node::new(
24_i64,
vec!["TestNode".to_string()],
HashMap::from_iter(vec![
("key1".to_string(), -1_i8),
("key2".to_string(), 1_i8),
]),
)],
vec![UnboundRelationship::new(
128_i64,
"TestRel".to_string(),
HashMap::from_iter(vec![
("key1".to_string(), -2_i8),
("key2".to_string(), 2_i8),
]),
)],
vec![100, 101]
)),
MARKER_TINY_STRUCT | 3
);
value_test!(
unbound_relationship,
Value::UnboundRelationship(UnboundRelationship::new(
128_i64,
"TestRel".to_string(),
HashMap::from_iter(vec![
("key1".to_string(), -2_i8),
("key2".to_string(), 2_i8),
]),
)),
MARKER_TINY_STRUCT | 3
);
value_test!(
date,
Value::Date(NaiveDate::from_ymd(2020, 12, 25)),
MARKER_TINY_STRUCT | 1,
&[SIGNATURE_DATE],
&[MARKER_INT_16],
18621_i16.to_be_bytes()
);
value_test!(
past_date,
Value::Date(NaiveDate::from_ymd(1901, 12, 31)),
MARKER_TINY_STRUCT | 1,
&[SIGNATURE_DATE],
&[MARKER_INT_16],
(-24838_i16).to_be_bytes()
);
value_test!(
future_date,
Value::Date(NaiveDate::from_ymd(3000, 5, 23)),
MARKER_TINY_STRUCT | 1,
&[SIGNATURE_DATE],
&[MARKER_INT_32],
376342_i32.to_be_bytes()
);
value_test!(
time,
Value::Time(NaiveTime::from_hms_nano(0, 0, 0, 0), Utc.fix()),
MARKER_TINY_STRUCT | 2,
&[SIGNATURE_TIME],
&[0, 0]
);
value_test!(
about_four_pm_pacific,
Value::Time(
NaiveTime::from_hms_nano(16, 4, 35, 235),
FixedOffset::east(-8 * 3600),
),
MARKER_TINY_STRUCT | 2,
&[SIGNATURE_TIME],
&[MARKER_INT_64],
57875000000235_i64.to_be_bytes(),
&[MARKER_INT_16],
(-8 * 3600_i16).to_be_bytes()
);
value_test!(
date_time_offset,
Value::DateTimeOffset(
FixedOffset::east(-5 * 3600)
.from_utc_datetime(&NaiveDate::from_ymd(2050, 12, 31).and_hms_nano(23, 59, 59, 10)),
),
MARKER_TINY_STRUCT | 3,
&[SIGNATURE_DATE_TIME_OFFSET],
&[MARKER_INT_64],
2556143999_i64.to_be_bytes(),
&[10],
&[MARKER_INT_16],
(-5 * 3600_i16).to_be_bytes()
);
value_test!(
date_time_zoned,
Value::DateTimeZoned(
chrono_tz::Asia::Ulaanbaatar
.ymd(2030, 8, 3)
.and_hms_milli(14, 30, 1, 2),
),
MARKER_TINY_STRUCT | 3,
&[SIGNATURE_DATE_TIME_ZONED],
&[MARKER_INT_32],
1911969001_i32.to_be_bytes(),
&[MARKER_INT_32],
2000000_i32.to_be_bytes(),
&[MARKER_SMALL_STRING, 16],
b"Asia/Ulaanbaatar"
);
value_test!(
local_time,
Value::LocalTime(NaiveTime::from_hms_nano(23, 59, 59, 999)),
MARKER_TINY_STRUCT | 1,
&[SIGNATURE_LOCAL_TIME],
&[MARKER_INT_64],
86399000000999_i64.to_be_bytes()
);
value_test!(
local_date_time,
Value::LocalDateTime(NaiveDate::from_ymd(1999, 2, 27).and_hms_nano(1, 0, 0, 9999)),
MARKER_TINY_STRUCT | 2,
&[SIGNATURE_LOCAL_DATE_TIME],
&[MARKER_INT_32],
920077200_i32.to_be_bytes(),
&[MARKER_INT_16],
9999_i16.to_be_bytes()
);
value_test!(
duration,
Value::Duration(Duration::new(9876, 12345, 65332, 23435)),
MARKER_TINY_STRUCT | 4,
&[SIGNATURE_DURATION],
&[MARKER_INT_16],
9876_i16.to_be_bytes(),
&[MARKER_INT_16],
12345_i16.to_be_bytes(),
&[MARKER_INT_32],
65332_i32.to_be_bytes(),
&[MARKER_INT_16],
23435_i16.to_be_bytes()
);
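// The four Duration fields serialized above are presumably, in order, months, days,
// seconds, and nanoseconds, matching the Bolt Duration structure.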
value_test!(
point_2d,
Value::Point2D(Point2D::new(9876, 12.312_345, 134_564.123_567_543)),
MARKER_TINY_STRUCT | 3,
&[SIGNATURE_POINT_2D],
&[MARKER_INT_16],
9876_i16.to_be_bytes(),
&[MARKER_FLOAT],
12.312345_f64.to_be_bytes(),
&[MARKER_FLOAT],
134_564.123_567_543_f64.to_be_bytes()
);
value_test!(
point_3d,
Value::Point3D(Point3D::new(
249,
543.598_387,
2_945_732_849.293_85,
45_438.874_385
)),
MARKER_TINY_STRUCT | 4,
&[SIGNATURE_POINT_3D],
&[MARKER_INT_16],
249_i16.to_be_bytes(),
&[MARKER_FLOAT],
543.598_387_f64.to_be_bytes(),
&[MARKER_FLOAT],
2_945_732_849.293_85_f64.to_be_bytes(),
&[MARKER_FLOAT],
45_438.874_385_f64.to_be_bytes()
);
#[test]
#[ignore]
fn value_size() {
use std::mem::size_of;
println!("Duration: {} bytes", size_of::<Duration>());
println!("Node: {} bytes", size_of::<Node>());
println!("Path: {} bytes", size_of::<Path>());
println!("Point2D: {} bytes", size_of::<Point2D>());
println!("Point3D: {} bytes", size_of::<Point3D>());
println!("Relationship: {} bytes", size_of::<Relationship>());
println!(
"UnboundRelationship: {} bytes",
size_of::<UnboundRelationship>()
);
println!("Value: {} bytes", size_of::<Value>())
}
}
// Marker computation (the enclosing method signature lies above this excerpt; from the
// Ok/Err arms below it is presumably `fn marker(&self) -> Result<u8, SerializationError>`).
{
match self {
Value::Boolean(true) => Ok(MARKER_TRUE),
Value::Boolean(false) => Ok(MARKER_FALSE),
Value::Integer(integer) => match integer {
-9_223_372_036_854_775_808..=-2_147_483_649
| 2_147_483_648..=9_223_372_036_854_775_807 => Ok(MARKER_INT_64),
-2_147_483_648..=-32_769 | 32_768..=2_147_483_647 => Ok(MARKER_INT_32),
-32_768..=-129 | 128..=32_767 => Ok(MARKER_INT_16),
-128..=-17 => Ok(MARKER_INT_8),
-16..=127 => Ok(*integer as u8),
},
Value::Float(_) => Ok(MARKER_FLOAT),
Value::Bytes(bytes) => match bytes.len() {
0..=255 => Ok(MARKER_SMALL_BYTES),
256..=65_535 => Ok(MARKER_MEDIUM_BYTES),
65_536..=2_147_483_647 => Ok(MARKER_LARGE_BYTES),
_ => Err(SerializationError::ValueTooLarge(bytes.len())),
},
Value::List(list) => match list.len() {
0..=15 => Ok(MARKER_TINY_LIST | list.len() as u8),
16..=255 => Ok(MARKER_SMALL_LIST),
256..=65_535 => Ok(MARKER_MEDIUM_LIST),
65_536..=4_294_967_295 => Ok(MARKER_LARGE_LIST),
len => Err(SerializationError::ValueTooLarge(len)),
},
Value::Map(map) => match map.len() {
0..=15 => Ok(MARKER_TINY_MAP | map.len() as u8),
16..=255 => Ok(MARKER_SMALL_MAP),
256..=65_535 => Ok(MARKER_MEDIUM_MAP),
65_536..=4_294_967_295 => Ok(MARKER_LARGE_MAP),
_ => Err(SerializationError::ValueTooLarge(map.len())),
},
Value::Null => Ok(MARKER_NULL),
Value::String(string) => match string.len() {
0..=15 => Ok(MARKER_TINY_STRING | string.len() as u8),
16..=255 => Ok(MARKER_SMALL_STRING),
256..=65_535 => Ok(MARKER_MEDIUM_STRING),
65_536..=4_294_967_295 => Ok(MARKER_LARGE_STRING),
_ => Err(SerializationError::ValueTooLarge(string.len())),
},
Value::Node(node) => node.marker(),
Value::Relationship(rel) => rel.marker(),
Value::Path(path) => path.marker(),
Value::UnboundRelationship(unbound_rel) => unbound_rel.marker(),
Value::Date(_) => Ok(MARKER_TINY_STRUCT | 1),
Value::Time(_, _) => Ok(MARKER_TINY_STRUCT | 2),
Value::DateTimeOffset(_) => Ok(MARKER_TINY_STRUCT | 3),
Value::DateTimeZoned(_) => Ok(MARKER_TINY_STRUCT | 3),
Value::LocalTime(_) => Ok(MARKER_TINY_STRUCT | 1),
Value::LocalDateTime(_) => Ok(MARKER_TINY_STRUCT | 2),
Value::Duration(duration) => duration.marker(),
Value::Point2D(point_2d) => point_2d.marker(),
Value::Point3D(point_3d) => point_3d.marker(),
}
}
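// A few worked examples of the integer branch above (values shown for illustration):
//   Value::Integer(7)       -> 0x07 itself (tiny ints -16..=127 encode as their own byte)
//   Value::Integer(-100)    -> MARKER_INT_8
//   Value::Integer(30_000)  -> MARKER_INT_16
//   Value::Integer(100_000) -> MARKER_INT_32
//   Value::Integer(1 << 40) -> MARKER_INT_64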
crear-cuestionario.component.ts
import { Component, OnInit } from '@angular/core';
import { FormGroup, FormBuilder, Validators } from '@angular/forms';
import { ActivatedRoute, Router } from '@angular/router';
import { MatDialog } from '@angular/material';
import { SesionService, PeticionesAPIService, CalculosService } from 'src/app/servicios';
import { Cuestionario } from 'src/app/clases';
import Swal from 'sweetalert2';
import { AgregarPreguntasDialogComponent } from './agregar-preguntas-dialog/agregar-preguntas-dialog.component';
import { DialogoConfirmacionComponent } from '../COMPARTIDO/dialogo-confirmacion/dialogo-confirmacion.component';
import { Observable, of } from 'rxjs';
@Component({
selector: 'app-crear-cuestionario',
templateUrl: './crear-cuestionario.component.html',
styleUrls: ['./crear-cuestionario.component.scss']
})
export class CrearCuestionarioComponent implements OnInit {
// Teacher ID
profesorId: number;
// The questionnaire we have created
cuestionario: Cuestionario;
// For the stepper
myForm: FormGroup;
// Home URL
URLVueltaInicio: string;
// Whether the button is enabled or not
// tslint:disable-next-line:ban-types
isDisabled: Boolean = true;
// Tracks whether the questionnaire has been created yet
cuestionarioYaCreado: Boolean = false;
finalizar: Boolean = false;
constructor(private route: ActivatedRoute,
private router: Router,
public dialog: MatDialog,
public sesion: SesionService,
public peticionesAPI: PeticionesAPIService,
private _formBuilder: FormBuilder,
public calculos: CalculosService) { }
ngOnInit() {
// Get the teacher's ID from the session
this.profesorId = this.sesion.DameProfesor().id;
this.URLVueltaInicio = this.route.snapshot.queryParams.URLVueltaInicio || '/inicio';
this.myForm = this._formBuilder.group({
tituloCuestionario: ['', Validators.required],
descripcionCuestionario: ['', Validators.required]
});
this.peticionesAPI.DameTodosMisCuestionarios (this.profesorId)
.subscribe (lista => this.sesion.TomaListaCuestionarios(lista));
}
// CHECK WHETHER BOTH THE TITLE AND THE DESCRIPTION HAVE CONTENT
Disabled() {
if (this.myForm.value.tituloCuestionario === '' || this.myForm.value.descripcionCuestionario === '') {
// If either value is empty, the button is disabled
this.isDisabled = true;
} else {
// If both are non-empty, it is enabled.
this.isDisabled = false;
}
}
// CREATE THE QUESTIONNAIRE WITH THE TITLE AND DESCRIPTION WE HAVE SET
CrearCuestionario() {
  let tituloCuestionario: string;
let descripcionCuestionario: string;
tituloCuestionario = this.myForm.value.tituloCuestionario;
descripcionCuestionario = this.myForm.value.descripcionCuestionario;
this.peticionesAPI.CreaCuestionario(new Cuestionario(tituloCuestionario, descripcionCuestionario), this.profesorId)
.subscribe((res) => {
if (res != null) {
this.cuestionarioYaCreado = true;
this.cuestionario = res;
} else {
Swal.fire('Se ha producido un error creando el cuestionario', 'ERROR', 'error');
}
});
}
// ALLOWS US TO MODIFY THE QUESTIONNAIRE'S PROPERTIES
EditarCuestionario() {
let tituloCuestionario: string;
let descripcionCuestionario: string;
tituloCuestionario = this.myForm.value.tituloCuestionario;
descripcionCuestionario = this.myForm.value.descripcionCuestionario;
// tslint:disable-next-line:max-line-length
this.peticionesAPI.ModificaCuestionario(new Cuestionario(tituloCuestionario, descripcionCuestionario), this.profesorId, this.cuestionario.id)
.subscribe((res) => {
if (res != null) {
this.cuestionario = res;
} else {
Swal.fire('Se ha producido un error editando el cuestionario', 'ERROR', 'error');
}
});
}
AbrirDialogoAgregarPreguntas(): void {
const dialogRef = this.dialog.open(AgregarPreguntasDialogComponent, {
width: '50%',
height: '80%',
position: {
top: '0%'
},
// Pass the required parameters
data: {
cuestionarioId: this.cuestionario.id,
profesorId: this.profesorId
}
});
}
// BACK TO HOME
VueltaInicio() {
this.router.navigate([this.URLVueltaInicio, this.profesorId]);
}
goBack() {
this.router.navigate(['/inicio/' + this.profesorId]);
}
aceptarGoBack() {
Swal.fire('El cuestionario se ha creado correctamente', 'Enhorabuena', 'success');
this.finalizar = true;
this.goBack();
}
// Here we set up the guard in case the user tries to abandon questionnaire
// creation before finishing
canExit(): Observable <boolean> {
if (!this.cuestionarioYaCreado || this.finalizar) {
return of (true);
} else {
const confirmacionObservable = new Observable <boolean>( obs => {
const dialogRef = this.dialog.open(DialogoConfirmacionComponent, {
height: '150px',
data: {
mensaje: 'Confirma que quieres abandonar el proceso de creación de cuestionario',
}
});
dialogRef.afterClosed().subscribe((confirmed: boolean) => {
if (confirmed) {
// If they confirm they want to leave, delete the questionnaire that was created
this.sesion.TomaCuestionario (this.cuestionario);
this.calculos.EliminarCuestionario().subscribe ( () => obs.next(confirmed));
} else {
obs.next (confirmed);
}
});
});
return confirmacionObservable;
}
}
}
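// A minimal sketch (an assumption, not part of this component) of how canExit() is
// usually consumed: a CanDeactivate guard registered on the route delegates the
// decision to the component. The guard and interface names here are hypothetical.
import { Injectable } from '@angular/core';
import { CanDeactivate } from '@angular/router';

export interface ComponentCanExit {
  canExit: () => Observable<boolean> | boolean;
}

@Injectable({ providedIn: 'root' })
export class CanExitGuard implements CanDeactivate<ComponentCanExit> {
  // Returns the component's own decision; allows navigation if canExit is absent.
  canDeactivate(component: ComponentCanExit): Observable<boolean> | boolean {
    return component.canExit ? component.canExit() : true;
  }
}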
direct_decl_smart_constructors.rs
/**
* Copyright (c) 2019, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the "hack" directory of this source tree.
*
*/
use parser_rust as parser;
use escaper::{extract_unquoted_string, unescape_double, unescape_single};
use flatten_smart_constructors::{FlattenOp, FlattenSmartConstructors};
use hhbc_rust::string_utils::GetName;
use oxidized::{
aast,
ast_defs::Id,
direct_decl_parser::Decls,
pos::Pos,
s_map::SMap,
typing_defs::{Ty, Ty_},
typing_reason::Reason,
};
use parser::{
indexed_source_text::IndexedSourceText, lexable_token::LexableToken, token_kind::TokenKind,
};
pub use crate::direct_decl_smart_constructors_generated::*;
pub fn empty_decls() -> Decls {
Decls {
classes: SMap::new(),
funs: SMap::new(),
typedefs: SMap::new(),
consts: SMap::new(),
}
}
fn try_collect<T, E>(vec: Vec<Result<T, E>>) -> Result<Vec<T>, E> {
vec.into_iter().try_fold(Vec::new(), |mut acc, elem| {
acc.push(elem?);
Ok(acc)
})
}
fn mangle_xhp_id(mut name: String) -> String {
fn ignore_id(name: &str) -> bool {
name.starts_with("class@anonymous") || name.starts_with("Closure$")
}
fn is_xhp(name: &str) -> bool {
name.chars().next().map_or(false, |c| c == ':')
}
if !ignore_id(&name) {
if is_xhp(&name) {
name.replace_range(..1, "xhp_")
}
name.replace(":", "__").replace("-", "_")
} else {
name
}
}
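// Worked examples of the mangling rules above:
//   ":foo:bar"       -> "xhp_foo__bar"
//   ":ns:widget-box" -> "xhp_ns__widget_box"
//   "Closure$0"      -> "Closure$0" (ignored ids pass through unchanged)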
pub fn get_name(namespace: &str, name: &Node_) -> Result<String, String> {
fn qualified_name_from_parts(namespace: &str, parts: &Vec<Node_>) -> Result<String, String> {
let mut qualified_name = String::new();
let mut leading_backslash = false;
for (index, part) in parts.into_iter().enumerate() {
match part {
Node_::Name(name) => {
qualified_name.push_str(&String::from_utf8_lossy(name.get().as_slice()))
}
Node_::Backslash if index == 0 => leading_backslash = true,
Node_::ListItem(listitem) => {
if let (Node_::Name(name), Node_::Backslash) = &**listitem {
qualified_name.push_str(&String::from_utf8_lossy(name.get().as_slice()));
qualified_name.push_str("\\");
} else {
return Err(format!(
"Expected a name or backslash, but got {:?}",
listitem
));
}
}
n => {
return Err(format!(
"Expected a name, backslash, or list item, but got {:?}",
n
))
}
}
}
Ok(if leading_backslash || namespace.is_empty() {
qualified_name // globally qualified name
} else {
namespace.to_owned() + "\\" + &qualified_name
})
}
match name {
Node_::Name(name) => {
// always a simple name
let name = name.to_string();
Ok(if namespace.is_empty() {
name
} else {
namespace.to_owned() + "\\" + &name
})
}
Node_::XhpName(name) => {
// xhp names are always unqualified
let name = name.to_string();
Ok(mangle_xhp_id(name))
}
Node_::QualifiedName(parts) => qualified_name_from_parts(namespace, &parts),
n => {
return Err(format!(
"Expected a name, XHP name, or qualified name, but got {:?}",
n
))
}
}
}
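// Examples of the resolution above: a simple name "bar" in namespace "Foo" becomes
// "Foo\bar"; a QualifiedName with a leading backslash is already fully qualified, so
// "\Baz\qux" resolves to "Baz\qux" regardless of the current namespace.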
#[derive(Clone, Debug)]
pub struct State<'a> {
pub source_text: IndexedSourceText<'a>,
pub decls: Decls,
}
#[derive(Clone, Debug)]
pub enum HintValue {
String,
Int,
Float,
Num,
Bool,
Apply(GetName),
}
#[derive(Clone, Debug)]
pub enum Node_ {
List(Vec<Node_>),
Ignored,
// tokens
Name(GetName),
String(GetName),
XhpName(GetName),
Hint(HintValue, Pos),
Backslash,
ListItem(Box<(Node_, Node_)>),
Class,
Interface,
Trait,
Extends,
Implements,
Abstract,
Final,
Static,
QualifiedName(Vec<Node_>),
ScopeResolutionExpression(Box<(Node_, Node_)>),
// declarations
ClassDecl(Box<ClassDeclChildren>),
FunctionDecl(Box<Node_>),
MethodDecl(Box<Node_>),
EnumDecl(Box<EnumDeclChildren>),
TraitUseClause(Box<Node_>),
RequireExtendsClause(Box<Node_>),
RequireImplementsClause(Box<Node_>),
Define(Box<Node_>),
TypeAliasDecl(Box<TypeAliasDeclChildren>),
NamespaceDecl(Box<Node_>, Box<Node_>),
EmptyBody,
}
pub type Node = Result<Node_, String>;
#[derive(Clone, Debug)]
pub struct ClassDeclChildren {
pub modifiers: Node_,
pub kind: Node_,
pub name: Node_,
pub attributes: Node_,
pub extends: Node_,
pub implements: Node_,
pub constrs: Node_,
pub body: Node_,
}
#[derive(Clone, Debug)]
pub struct EnumDeclChildren {
pub name: Node_,
pub attributes: Node_,
}
#[derive(Clone, Debug)]
pub struct TypeAliasDeclChildren {
pub name: Node_,
pub attributes: Node_,
}
impl<'a> FlattenOp for DirectDeclSmartConstructors<'_> {
type S = Node;
fn flatten(lst: Vec<Self::S>) -> Self::S {
let r = lst
.into_iter()
.map(|s| match s {
Ok(Node_::List(children)) => children.into_iter().map(|x| Ok(x)).collect(),
x => {
if Self::is_zero(&x) {
vec![]
} else {
vec![x]
}
}
})
.flatten()
.collect::<Vec<Self::S>>();
let mut r = try_collect(r)?;
Ok(match r.as_slice() {
[] => Node_::Ignored,
[_] => r.pop().unwrap(),
_ => Node_::List(r),
})
}
fn zero() -> Self::S {
Ok(Node_::Ignored)
}
fn is_zero(s: &Self::S) -> bool {
if let Ok(s) = s {
match s {
Node_::Ignored |
// tokens
Node_::Name(_) |
Node_::String(_) |
Node_::XhpName(_) |
Node_::Hint(_, _) |
Node_::Backslash |
Node_::ListItem(_) |
Node_::Class |
Node_::Interface |
Node_::Trait |
Node_::Extends |
Node_::Implements |
Node_::Abstract |
Node_::Final |
Node_::Static |
Node_::QualifiedName(_) => true,
_ => false,
}
} else {
false
}
}
}
impl<'a> FlattenSmartConstructors<'a, State<'a>> for DirectDeclSmartConstructors<'a> {
fn make_token(&mut self, token: Self::Token) -> Self::R {
let token_text = || {
self.state
.source_text
.source_text()
.sub(
token.leading_start_offset().unwrap_or(0) + token.leading_width(),
token.width(),
)
.to_vec()
};
let token_pos = || {
self.state
.source_text
.relative_pos(token.start_offset(), token.end_offset() + 1)
};
let kind = token.kind();
Ok(match kind {
TokenKind::Name => Node_::Name(GetName::new(token_text(), |string| string)),
TokenKind::DecimalLiteral => Node_::String(GetName::new(token_text(), |string| string)),
TokenKind::SingleQuotedStringLiteral => {
Node_::String(GetName::new(token_text(), |string| {
let tmp = unescape_single(string.as_str()).ok().unwrap();
extract_unquoted_string(&tmp, 0, tmp.len()).ok().unwrap()
}))
}
TokenKind::DoubleQuotedStringLiteral => {
Node_::String(GetName::new(token_text(), |string| {
let tmp = unescape_double(string.as_str()).ok().unwrap();
extract_unquoted_string(&tmp, 0, tmp.len()).ok().unwrap()
}))
}
TokenKind::XHPClassName => Node_::XhpName(GetName::new(token_text(), |string| string)),
TokenKind::String => Node_::Hint(HintValue::String, token_pos()),
TokenKind::Int => Node_::Hint(HintValue::Int, token_pos()),
TokenKind::Float => Node_::Hint(HintValue::Float, token_pos()),
TokenKind::Double => Node_::Hint(
HintValue::Apply(GetName::new(token_text(), |string| string)),
token_pos(),
),
TokenKind::Num => Node_::Hint(HintValue::Num, token_pos()),
TokenKind::Bool => Node_::Hint(HintValue::Bool, token_pos()),
TokenKind::Boolean => Node_::Hint(
HintValue::Apply(GetName::new(token_text(), |string| string)),
token_pos(),
),
TokenKind::Backslash => Node_::Backslash,
TokenKind::Class => Node_::Class,
TokenKind::Trait => Node_::Trait,
TokenKind::Interface => Node_::Interface,
TokenKind::Extends => Node_::Extends,
TokenKind::Implements => Node_::Implements,
TokenKind::Abstract => Node_::Abstract,
TokenKind::Final => Node_::Final,
TokenKind::Static => Node_::Static,
_ => Node_::Ignored,
})
}
fn make_missing(&mut self, _: usize) -> Self::R {
Ok(Node_::Ignored)
}
fn make_list(&mut self, items: Vec<Self::R>, _: usize) -> Self::R {
let result = if !items.is_empty()
&& !items.iter().all(|r| match r {
Ok(Node_::Ignored) => true,
_ => false,
}) {
let items = try_collect(items)?;
Node_::List(items)
} else {
Node_::Ignored
};
Ok(result)
}
fn make_qualified_name(&mut self, arg0: Self::R) -> Self::R {
Ok(match arg0? {
Node_::Ignored => Node_::Ignored,
Node_::List(nodes) => Node_::QualifiedName(nodes),
node => Node_::QualifiedName(vec![node]),
})
}
fn make_simple_type_specifier(&mut self, arg0: Self::R) -> Self::R {
arg0
}
fn make_simple_initializer(&mut self, _arg0: Self::R, arg1: Self::R) -> Self::R {
arg1
}
fn make_literal_expression(&mut self, arg0: Self::R) -> Self::R {
arg0
}
fn make_list_item(&mut self, item: Self::R, sep: Self::R) -> Self::R {
Ok(match (item?, sep?) {
    (Node_::Ignored, Node_::Ignored) => Node_::Ignored,
    (x, Node_::Ignored) | (Node_::Ignored, x) => x,
    (x, y) => Node_::ListItem(Box::new((x, y))),
})
}
fn make_generic_type_specifier(
&mut self,
class_type: Self::R,
_argument_list: Self::R,
) -> Self::R {
class_type
}
fn make_enum_declaration(
&mut self,
attributes: Self::R,
_keyword: Self::R,
name: Self::R,
_colon: Self::R,
_base: Self::R,
_type: Self::R,
_left_brace: Self::R,
_enumerators: Self::R,
_right_brace: Self::R,
) -> Self::R {
let (name, attributes) = (name?, attributes?);
Ok(match name {
Node_::Ignored => Node_::Ignored,
_ => Node_::EnumDecl(Box::new(EnumDeclChildren { name, attributes })),
})
}
fn make_alias_declaration(
&mut self,
attributes: Self::R,
_keyword: Self::R,
name: Self::R,
_generic_params: Self::R,
_constraint: Self::R,
_equal: Self::R,
_type: Self::R,
_semicolon: Self::R,
) -> Self::R {
let (name, attributes) = (name?, attributes?);
Ok(match name {
Node_::Ignored => Node_::Ignored,
_ => Node_::TypeAliasDecl(Box::new(TypeAliasDeclChildren { name, attributes })),
})
}
fn make_define_expression(
&mut self,
_keyword: Self::R,
_left_paren: Self::R,
args: Self::R,
_right_paren: Self::R,
) -> Self::R {
match args? {
Node_::List(mut nodes) => {
if let Some(_snd) = nodes.pop() {
if let Some(fst @ Node_::String(_)) = nodes.pop() {
if nodes.is_empty() {
return Ok(Node_::Define(Box::new(fst)));
}
}
}
}
_ => (),
};
Ok(Node_::Ignored)
}
fn make_function_declaration(
&mut self,
_attributes: Self::R,
header: Self::R,
body: Self::R,
) -> Self::R {
Ok(match (header?, body?) {
(Node_::Ignored, Node_::Ignored) => Node_::Ignored,
(v, Node_::Ignored) | (Node_::Ignored, v) => v,
(v1, v2) => Node_::List(vec![v1, v2]),
})
}
fn make_function_declaration_header(
&mut self,
_modifiers: Self::R,
_keyword: Self::R,
name: Self::R,
_type_params: Self::R,
_left_parens: Self::R,
_param_list: Self::R,
_right_parens: Self::R,
_colon: Self::R,
_type: Self::R,
_where: Self::R,
) -> Self::R {
Ok(match name? {
Node_::Ignored => Node_::Ignored,
name => Node_::FunctionDecl(Box::new(name)),
})
}
fn make_trait_use(
&mut self,
_keyword: Self::R,
names: Self::R,
_semicolon: Self::R,
) -> Self::R {
Ok(match names? {
Node_::Ignored => Node_::Ignored,
names => Node_::TraitUseClause(Box::new(names)),
})
}
fn make_require_clause(
&mut self,
_keyword: Self::R,
kind: Self::R,
name: Self::R,
_semicolon: Self::R,
) -> Self::R {
Ok(match name? {
Node_::Ignored => Node_::Ignored,
name => match kind? {
Node_::Extends => Node_::RequireExtendsClause(Box::new(name)),
Node_::Implements => Node_::RequireImplementsClause(Box::new(name)),
_ => Node_::Ignored,
},
})
}
fn make_const_declaration(
&mut self,
_arg0: Self::R,
_arg1: Self::R,
hint: Self::R,
decls: Self::R,
_arg4: Self::R,
) -> Self::R {
// None of the Node_::Ignoreds should happen in a well-formed file, but they could happen in
// a malformed one.
let hint = hint?;
Ok(match decls? {
Node_::List(nodes) => match nodes.as_slice() {
[Node_::List(nodes)] => match nodes.as_slice() {
[name, _] => {
let name = get_name("", name)?;
match hint {
Node_::Hint(hv, pos) => {
let reason = Reason::Rhint(pos.clone());
let ty_ = match hv {
HintValue::String => Ty_::Tprim(aast::Tprim::Tstring),
HintValue::Int => Ty_::Tprim(aast::Tprim::Tint),
HintValue::Float => Ty_::Tprim(aast::Tprim::Tfloat),
HintValue::Num => Ty_::Tprim(aast::Tprim::Tnum),
HintValue::Bool => Ty_::Tprim(aast::Tprim::Tbool),
HintValue::Apply(gn) => Ty_::Tapply(
Id(pos, "\\".to_string() + &(gn.to_unescaped_string())),
Vec::new(),
),
};
self.state
.decls
.consts
.insert(name, Ty(reason, Box::new(ty_)));
}
n => {
return Err(format!(
"Expected primitive value for constant {}, but was {:?}",
name, n
))
}
};
Node_::Ignored
}
_ => Node_::Ignored,
},
_ => Node_::Ignored,
},
_ => Node_::Ignored,
})
}
fn make_constant_declarator(&mut self, name: Self::R, initializer: Self::R) -> Self::R {
let (name, initializer) = (name?, initializer?);
Ok(match name {
Node_::Ignored => Node_::Ignored,
_ => Node_::List(vec![name, initializer]),
})
}
fn make_namespace_declaration(
&mut self,
_keyword: Self::R,
name: Self::R,
body: Self::R,
) -> Self::R {
let (name, body) = (name?, body?);
Ok(match body {
Node_::Ignored => Node_::Ignored,
_ => Node_::NamespaceDecl(Box::new(name), Box::new(body)),
})
}
fn make_namespace_body(
&mut self,
_left_brace: Self::R,
decls: Self::R,
_right_brace: Self::R,
) -> Self::R {
decls
}
fn make_namespace_empty_body(&mut self, _semicolon: Self::R) -> Self::R {
Ok(Node_::EmptyBody)
}
fn make_methodish_declaration(
&mut self,
_attributes: Self::R,
_function_decl_header: Self::R,
body: Self::R,
_semicolon: Self::R,
) -> Self::R {
let body = body?;
Ok(match body {
Node_::Ignored => Node_::Ignored,
_ => Node_::MethodDecl(Box::new(body)),
})
}
fn make_classish_declaration(
&mut self,
attributes: Self::R,
modifiers: Self::R,
keyword: Self::R,
name: Self::R,
_type_params: Self::R,
_extends_keyword: Self::R,
extends: Self::R,
_implements_keyword: Self::R,
implements: Self::R,
constrs: Self::R,
body: Self::R,
) -> Self::R {
let name = name?;
Ok(match name {
Node_::Ignored => Node_::Ignored,
_ => Node_::ClassDecl(Box::new(ClassDeclChildren {
modifiers: modifiers?,
kind: keyword?,
name,
attributes: attributes?,
extends: extends?,
implements: implements?,
constrs: constrs?,
body: body?,
})),
})
}
fn make_classish_body(
&mut self,
_left_brace: Self::R,
elements: Self::R,
_right_brace: Self::R,
) -> Self::R {
elements
}
fn make_old_attribute_specification(
&mut self,
_left_double_angle: Self::R,
attributes: Self::R,
_right_double_angle: Self::R,
) -> Self::R {
attributes
}
fn make_attribute_specification(&mut self, attributes: Self::R) -> Self::R {
attributes
}
fn make_attribute(&mut self, _at: Self::R, attribute: Self::R) -> Self::R {
    attribute
}
fn make_constructor_call(
&mut self,
class_type: Self::R,
_left_paren: Self::R,
argument_list: Self::R,
_right_paren: Self::R,
) -> Self::R {
Ok(Node_::ListItem(Box::new((class_type?, argument_list?))))
}
fn make_decorated_expression(&mut self, _decorator: Self::R, expression: Self::R) -> Self::R {
expression
}
fn make_scope_resolution_expression(
&mut self,
qualifier: Self::R,
_operator: Self::R,
name: Self::R,
) -> Self::R {
Ok(Node_::ScopeResolutionExpression(Box::new((
qualifier?, name?,
))))
}
}
graphql-index-transformer.ts
import { DirectiveWrapper, InvalidDirectiveError, TransformerPluginBase } from '@aws-amplify/graphql-transformer-core';
import {
TransformerContextProvider,
TransformerResolverProvider,
TransformerSchemaVisitStepContextProvider,
TransformerTransformSchemaStepContextProvider,
} from '@aws-amplify/graphql-transformer-interfaces';
import {
DirectiveNode,
EnumTypeDefinitionNode,
FieldDefinitionNode,
InterfaceTypeDefinitionNode,
Kind,
ObjectTypeDefinitionNode,
} from 'graphql';
import { isListType, isScalarOrEnum } from 'graphql-transformer-common';
import { appendSecondaryIndex, constructSyncVTL, updateResolversForIndex } from './resolvers';
import { addKeyConditionInputs, ensureQueryField, updateMutationConditionInput } from './schema';
import { IndexDirectiveConfiguration } from './types';
import { validateNotSelfReferencing } from './utils';
const directiveName = 'index';
const directiveDefinition = `
directive @${directiveName}(name: String!, sortKeyFields: [String], queryField: String) repeatable on FIELD_DEFINITION
`;
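// For reference, a schema using this directive (hypothetical model, standard Amplify usage):
//
//   type Order @model {
//     customerID: ID! @index(name: "byCustomer", sortKeyFields: ["createdAt"], queryField: "ordersByCustomer")
//     createdAt: String!
//   }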
/**
 * Transformer that implements the `@index` directive, adding a secondary index
 * (and, optionally, a query field) for a field on a `@model` type.
 */
export class IndexTransformer extends TransformerPluginBase {
private directiveList: IndexDirectiveConfiguration[] = [];
private resolverMap: Map<TransformerResolverProvider, string> = new Map();
constructor() {
super('amplify-index-transformer', directiveDefinition);
}
field = (
parent: ObjectTypeDefinitionNode | InterfaceTypeDefinitionNode,
definition: FieldDefinitionNode,
directive: DirectiveNode,
context: TransformerSchemaVisitStepContextProvider,
): void => {
const directiveWrapped = new DirectiveWrapper(directive);
const args = directiveWrapped.getArguments({
object: parent as ObjectTypeDefinitionNode,
field: definition,
directive,
} as IndexDirectiveConfiguration);
if (!args.sortKeyFields) {
args.sortKeyFields = [];
} else if (!Array.isArray(args.sortKeyFields)) {
args.sortKeyFields = [args.sortKeyFields];
}
args.sortKey = [];
validate(args, context as TransformerContextProvider);
this.directiveList.push(args);
};
public after = (ctx: TransformerContextProvider): void => {
if (!ctx.isProjectUsingDataStore()) return;
// construct sync VTL code
this.resolverMap.forEach((syncVTLContent, resource) => {
if (syncVTLContent) {
constructSyncVTL(syncVTLContent, resource);
}
});
};
transformSchema = (ctx: TransformerTransformSchemaStepContextProvider): void => {
const context = ctx as TransformerContextProvider;
for (const config of this.directiveList) {
ensureQueryField(config, context);
addKeyConditionInputs(config, context);
updateMutationConditionInput(config, context);
}
};
generateResolvers = (ctx: TransformerContextProvider): void => {
for (const config of this.directiveList) {
appendSecondaryIndex(config, ctx);
updateResolversForIndex(config, ctx, this.resolverMap);
}
};
}
const validate = (config: IndexDirectiveConfiguration, ctx: TransformerContextProvider): void => {
const {
name, object, field, sortKeyFields,
} = config;
const defaultGSI = ctx.featureFlags.getBoolean('secondaryKeyAsGSI', true);
validateNotSelfReferencing(config);
const modelDirective = object.directives!.find(directive => directive.name.value === 'model');
if (!modelDirective) {
throw new InvalidDirectiveError(`The @${directiveName} directive may only be added to object definitions annotated with @model.`);
}
config.modelDirective = modelDirective;
const fieldMap = new Map<string, FieldDefinitionNode>();
for (const objectField of object.fields!) {
fieldMap.set(objectField.name.value, objectField);
for (const peerDirective of objectField.directives!) {
if (peerDirective === config.directive) {
continue;
}
if (peerDirective.name.value === 'primaryKey') {
const hasSortFields = peerDirective.arguments!.some((arg: any) => arg.name.value === 'sortKeyFields' && arg.value.values?.length > 0);
config.primaryKeyField = objectField;
if (!hasSortFields && objectField.name.value === field.name.value) {
throw new InvalidDirectiveError(
`Invalid @index '${name}'. You may not create an index where the partition key `
+ 'is the same as that of the primary key unless the primary key has a sort field. '
+ 'You cannot have a local secondary index without a sort key in the primary key.',
);
}
}
if (
peerDirective.name.value === directiveName
&& peerDirective.arguments!.some((arg: any) => arg.name.value === 'name' && arg.value.value === name)
) {
throw new InvalidDirectiveError(
`You may only supply one @${directiveName} with the name '${name}' on type '${object.name.value}'.`,
);
}
}
for (const peerDirective of objectField.directives!) {
const hasSortFields = peerDirective.arguments!.some((arg: any) => arg.name.value === 'sortKeyFields' && arg.value.values?.length > 0);
if (!defaultGSI && !hasSortFields && objectField == config.primaryKeyField && objectField.name.value === field.name.value) {
throw new InvalidDirectiveError(
`Invalid @index '${name}'. You may not create an index where the partition key `
+ 'is the same as that of the primary key unless the index has a sort field. '
+ 'You cannot have a local secondary index without a sort key in the index.',
);
}
}
}
const enums = ctx.output.getTypeDefinitionsOfKind(Kind.ENUM_TYPE_DEFINITION) as EnumTypeDefinitionNode[];
if (!isScalarOrEnum(field.type, enums) || isListType(field.type)) {
throw new InvalidDirectiveError(`Index '${name}' on type '${object.name.value}.${field.name.value}' cannot be a non-scalar.`);
}
for (const sortKeyFieldName of sortKeyFields) {
const sortField = fieldMap.get(sortKeyFieldName);
if (!sortField) {
throw new InvalidDirectiveError(
`Can't find field '${sortKeyFieldName}' in ${object.name.value}, but it was specified in index '${name}'.`,
);
}
if (!isScalarOrEnum(sortField.type, enums) || isListType(sortField.type)) {
throw new InvalidDirectiveError(
`The sort key of index '${name}' on type '${object.name.value}.${sortField.name.value}' cannot be a non-scalar.`,
);
}
config.sortKey.push(sortField);
}
};
ipamd.go
// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"). You may
// not use this file except in compliance with the License. A copy of the
// License is located at
//
// http://aws.amazon.com/apache2.0/
//
// or in the "license" file accompanying this file. This file is distributed
// on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
// express or implied. See the License for the specific language governing
// permissions and limitations under the License.
package ipamd
import (
"context"
"fmt"
"math"
"net"
"os"
"strconv"
"strings"
"sync"
"sync/atomic"
"time"
"sigs.k8s.io/controller-runtime/pkg/client"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/service/ec2"
"github.com/pkg/errors"
"github.com/prometheus/client_golang/prometheus"
corev1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/types"
"k8s.io/apimachinery/pkg/util/sets"
"k8s.io/apimachinery/pkg/util/wait"
"k8s.io/client-go/util/retry"
"github.com/aws/amazon-vpc-cni-k8s/pkg/awsutils"
"github.com/aws/amazon-vpc-cni-k8s/pkg/eniconfig"
"github.com/aws/amazon-vpc-cni-k8s/pkg/ipamd/datastore"
"github.com/aws/amazon-vpc-cni-k8s/pkg/networkutils"
"github.com/aws/amazon-vpc-cni-k8s/pkg/utils/logger"
)
// The package ipamd is a long running daemon which manages a warm pool of available IP addresses.
// It also monitors the size of the pool, dynamically allocates more ENIs when the pool size goes below
// the minimum threshold and frees them back when the pool size goes above max threshold.
const (
ipPoolMonitorInterval = 5 * time.Second
maxRetryCheckENI = 5
eniAttachTime = 10 * time.Second
nodeIPPoolReconcileInterval = 60 * time.Second
decreaseIPPoolInterval = 30 * time.Second
// ipReconcileCooldown is the amount of time that an IP address must wait until it can be added to the data store
// during reconciliation after being discovered on the EC2 instance metadata.
ipReconcileCooldown = 60 * time.Second
// This environment variable is used to specify the desired number of free IPs always available in the "warm pool".
// When it is not set, ipamd defaults to using all available IPs per ENI for that instance type.
// For example, on an m4.4xlarge node:
// If WARM_IP_TARGET is set to 1, and there are 9 pods running on the node, ipamd will try
// to make the "warm pool" have 10 IP addresses, with 9 assigned to pods and 1 free IP.
//
// If WARM_IP_TARGET is not set, it will default to 30 (which is the maximum number of IPs per ENI).
// If there are 9 pods running on the node, ipamd will try to make the "warm pool" have 39 IPs, with 9
// assigned to pods and 30 free IPs.
envWarmIPTarget = "WARM_IP_TARGET"
noWarmIPTarget = 0
// This environment variable is used to specify the desired minimum number of total IPs.
// When it is not set, ipamd defaults to 0.
// For example, on an m4.4xlarge node:
// If WARM_IP_TARGET is set to 1 and MINIMUM_IP_TARGET is set to 12, and there are 9 pods running on the node,
// ipamd will make the "warm pool" have 12 IP addresses, with 9 assigned to pods and 3 free IPs.
//
// If MINIMUM_IP_TARGET is not set, it will default to 0, which causes WARM_IP_TARGET settings to be the
// only settings considered.
envMinimumIPTarget = "MINIMUM_IP_TARGET"
noMinimumIPTarget = 0
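// Illustrative arithmetic (not code ipamd executes): with both targets set, the pool is
// sized to roughly max(assigned + WARM_IP_TARGET, MINIMUM_IP_TARGET):
//   9 pods,  WARM_IP_TARGET=1, MINIMUM_IP_TARGET=12 -> max(9+1, 12)  = 12 IPs (3 free)
//   15 pods, same settings                          -> max(15+1, 12) = 16 IPs (1 free)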
// This environment variable is used to specify the desired number of free ENIs (along with all of
// their IP addresses) always available in the "warm pool".
// When it is not set, it defaults to 1.
//
// When WARM_IP_TARGET is defined, ipamd will use the behavior defined for WARM_IP_TARGET instead.
//
// For example, on an m4.4xlarge node:
// If WARM_ENI_TARGET is set to 2, and there are 9 pods running on the node, ipamd will try to
// make the "warm pool" have 2 extra ENIs and their IP addresses, in other words, 90 IP addresses
// with 9 IPs assigned to pods and 81 free IPs.
//
// If WARM_ENI_TARGET is not set, it defaults to 1, so if there are 9 pods running on the node,
// ipamd will try to make the "warm pool" have 1 extra ENI, in other words, 60 IPs with 9 already
// assigned to pods and 51 free IPs.
envWarmENITarget = "WARM_ENI_TARGET"
defaultWarmENITarget = 1
// This environment variable is used to specify the maximum number of ENIs that will be allocated.
// When it is not set or less than 1, the default is to use the maximum available for the instance type.
//
// The maximum number of ENIs is in any case limited to the amount allowed for the instance type.
envMaxENI = "MAX_ENI"
defaultMaxENI = -1
// This environment variable is used to specify whether Pods need to use a security group and subnet defined in an ENIConfig CRD.
// When it is NOT set or set to false, ipamd will use primary interface security group and subnet for Pod network.
envCustomNetworkCfg = "AWS_VPC_K8S_CNI_CUSTOM_NETWORK_CFG"
// eniNoManageTagKey is the tag that may be set on an ENI to indicate ipamd
// should not manage it in any form.
eniNoManageTagKey = "node.k8s.amazonaws.com/no_manage"
// disableENIProvisioning is used to specify that ENI doesn't need to be synced during initializing a pod.
envDisableENIProvisioning = "DISABLE_NETWORK_RESOURCE_PROVISIONING"
// Specify where ipam should persist its current IP<->container allocations.
envBackingStorePath = "AWS_VPC_K8S_CNI_BACKING_STORE"
defaultBackingStorePath = "/var/run/aws-node/ipam.json"
// envEnablePodENI is used to attach a Trunk ENI to every node. Required in order to give Branch ENIs to pods.
envEnablePodENI = "ENABLE_POD_ENI"
// envNodeName will be used to store Node name
envNodeName = "MY_NODE_NAME"
// vpcENIConfigLabel is used by the VPC resource controller to pick the right ENI config.
vpcENIConfigLabel = "vpc.amazonaws.com/eniConfig"
//envEnableIpv4PrefixDelegation is used to allocate /28 prefix instead of secondary IP for an ENI.
envEnableIpv4PrefixDelegation = "ENABLE_PREFIX_DELEGATION"
//envWarmPrefixTarget is used to keep a /28 prefix in warm pool.
envWarmPrefixTarget = "WARM_PREFIX_TARGET"
defaultWarmPrefixTarget = 0
//envEnableIPv4 - Env variable to enable/disable IPv4 mode
envEnableIPv4 = "ENABLE_IPv4"
//envEnableIPv6 - Env variable to enable/disable IPv6 mode
envEnableIPv6 = "ENABLE_IPv6"
ipV4AddrFamily = "4"
ipV6AddrFamily = "6"
//insufficientCidrErrorCooldown is the amount of time reconciler will wait before trying to fetch
//more IPs/prefixes for an ENI. With InsufficientCidr we know the subnet doesn't have enough IPs so
//instead of retrying every 5s which would lead to increase in EC2 AllocIPAddress calls, we wait for
//120 seconds for a retry.
insufficientCidrErrorCooldown = 120 * time.Second
// envManageUntaggedENI is used to determine if untagged ENIs should be managed or unmanaged
envManageUntaggedENI = "MANAGE_UNTAGGED_ENI"
eniNodeTagKey = "node.k8s.amazonaws.com/instance_id"
// envAnnotatePodIP is used to annotate pods with their IPs via the vpc.amazonaws.com/pod-ips annotation.
// Ref: https://github.com/projectcalico/calico/issues/3530
// The annotation may be:
//   - not present, in which case we fall back to the k8s podIP
//   - present and set to an IP, in which case we use it
//   - present and set to the empty string, which we use to mean "CNI DEL had occurred; networking has been removed from this pod"
// The empty-string case helps close a race at pod shutdown where it looks like the pod still has its IP when the IP has been released
)
var log = logger.Get()
var (
ipamdErr = prometheus.NewCounterVec(
prometheus.CounterOpts{
Name: "awscni_ipamd_error_count",
Help: "The number of errors encountered in ipamd",
},
[]string{"fn"},
)
ipamdActionsInprogress = prometheus.NewGaugeVec(
prometheus.GaugeOpts{
Name: "awscni_ipamd_action_inprogress",
Help: "The number of ipamd actions in progress",
},
[]string{"fn"},
)
enisMax = prometheus.NewGauge(
prometheus.GaugeOpts{
Name: "awscni_eni_max",
Help: "The maximum number of ENIs that can be attached to the instance, accounting for unmanaged ENIs",
},
)
ipMax = prometheus.NewGauge(
prometheus.GaugeOpts{
Name: "awscni_ip_max",
Help: "The maximum number of IP addresses that can be allocated to the instance",
},
)
reconcileCnt = prometheus.NewCounterVec(
prometheus.CounterOpts{
Name: "awscni_reconcile_count",
Help: "The number of times ipamd reconciles on ENIs and IP/Prefix addresses",
},
[]string{"fn"},
)
addIPCnt = prometheus.NewCounter(
prometheus.CounterOpts{
Name: "awscni_add_ip_req_count",
Help: "The number of add IP address requests",
},
)
delIPCnt = prometheus.NewCounterVec(
prometheus.CounterOpts{
Name: "awscni_del_ip_req_count",
Help: "The number of delete IP address requests",
},
[]string{"reason"},
)
podENIErr = prometheus.NewCounterVec(
prometheus.CounterOpts{
Name: "awscni_pod_eni_error_count",
Help: "The number of errors encountered for pod ENIs",
},
[]string{"fn"},
)
prometheusRegistered = false
)
// IPAMContext contains node level control information
type IPAMContext struct {
awsClient awsutils.APIs
dataStore *datastore.DataStore
rawK8SClient client.Client
cachedK8SClient client.Client
enableIPv4 bool
enableIPv6 bool
useCustomNetworking bool
networkClient networkutils.NetworkAPIs
maxIPsPerENI int
maxENI int
maxPrefixesPerENI int
unmanagedENI int
warmENITarget int
warmIPTarget int
minimumIPTarget int
warmPrefixTarget int
primaryIP map[string]string // primaryIP is a map from ENI ID to primary IP of that ENI
lastNodeIPPoolAction time.Time
lastDecreaseIPPool time.Time
// reconcileCooldownCache keeps timestamps of the last time an IP address was unassigned from an ENI,
// so that we don't reconcile and add it back too quickly if IMDS lags behind reality.
reconcileCooldownCache ReconcileCooldownCache
terminating int32 // Flag to warn that the pod is about to shut down.
disableENIProvisioning bool
enablePodENI bool
myNodeName string
enablePrefixDelegation bool
lastInsufficientCidrError time.Time
enableManageUntaggedMode bool
enablePodIPAnnotation bool
}
// setUnmanagedENIs will rebuild the set of ENI IDs for ENIs tagged as "no_manage"
func (c *IPAMContext) setUnmanagedENIs(tagMap map[string]awsutils.TagMap) {
if len(tagMap) == 0 {
return
}
var unmanagedENIlist []string
// if "no_manage" tag is present and is true - ENI is unmanaged
// if "no_manage" tag is present and is "not true" - ENI is managed
// if "instance_id" tag is present and is set to instanceID - ENI is managed since this was created by IPAMD
// if "no_manage" tag is not present or not IPAMD created ENI, check if we are in Manage Untagged Mode, default is true.
// if enableManageUntaggedMode is false, then consider all untagged ENIs as unmanaged.
for eniID, tags := range tagMap {
if _, found := tags[eniNoManageTagKey]; found {
if tags[eniNoManageTagKey] != "true" {
continue
}
} else if _, found := tags[eniNodeTagKey]; found && tags[eniNodeTagKey] == c.awsClient.GetInstanceID() {
continue
} else if c.enableManageUntaggedMode {
continue
}
if eniID == c.awsClient.GetPrimaryENI() {
log.Debugf("Ignoring primary ENI %s since it is always managed", eniID)
} else {
log.Debugf("Marking ENI %s as being unmanaged", eniID)
unmanagedENIlist = append(unmanagedENIlist, eniID)
}
}
c.awsClient.SetUnmanagedENIs(unmanagedENIlist)
}
// ReconcileCooldownCache keep track of recently freed CIDRs to avoid reading stale EC2 metadata
type ReconcileCooldownCache struct {
sync.RWMutex
cache map[string]time.Time
}
// Add sets a timestamp for the CIDR added that says how long they are not to be put back in the data store.
func (r *ReconcileCooldownCache) Add(cidr string) {
r.Lock()
defer r.Unlock()
expiry := time.Now().Add(ipReconcileCooldown)
r.cache[cidr] = expiry
}
// Remove removes a CIDR from the cooldown cache.
func (r *ReconcileCooldownCache) Remove(cidr string) {
r.Lock()
defer r.Unlock()
log.Debugf("Removing %s from cooldown cache.", cidr)
delete(r.cache, cidr)
}
// RecentlyFreed checks if this CIDR was recently freed.
func (r *ReconcileCooldownCache) RecentlyFreed(cidr string) (found, recentlyFreed bool) {
r.Lock()
defer r.Unlock()
now := time.Now()
if expiry, ok := r.cache[cidr]; ok {
log.Debugf("Checking if CIDR %s has been recently freed. Cooldown expires at: %s. (Cooldown: %v)", cidr, expiry, now.Sub(expiry) < 0)
return true, now.Sub(expiry) < 0
}
return false, false
}
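// Typical flow (illustrative): when reconciliation frees a CIDR it calls Add(cidr); a later
// reconcile that still sees the CIDR in EC2 metadata consults RecentlyFreed(cidr) and skips
// re-adding it until the 60-second cooldown has expired, after which Remove(cidr) clears the entry.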
func prometheusRegister() {
if !prometheusRegistered {
prometheus.MustRegister(ipamdErr)
prometheus.MustRegister(ipamdActionsInprogress)
prometheus.MustRegister(enisMax)
prometheus.MustRegister(ipMax)
prometheus.MustRegister(reconcileCnt)
prometheus.MustRegister(addIPCnt)
prometheus.MustRegister(delIPCnt)
prometheus.MustRegister(podENIErr)
prometheusRegistered = true
}
}
// containsInsufficientCidrBlocksError returns whether the given error is an EC2 "InsufficientCidrBlocks"
// error, i.e. the subnet cannot supply any more CIDR blocks (prefixes)
func containsInsufficientCidrBlocksError(err error) bool {
var awsErr awserr.Error
if errors.As(err, &awsErr) {
return awsErr.Code() == "InsufficientCidrBlocks"
}
return false
}
// inInsufficientCidrCoolingPeriod checks whether IPAMD is in insufficientCidrErrorCooldown
func (c *IPAMContext) inInsufficientCidrCoolingPeriod() bool {
return time.Since(c.lastInsufficientCidrError) <= insufficientCidrErrorCooldown
}
// New retrieves IP address usage information from Instance MetaData service and Kubelet
// then initializes IP address pool data store
func New(rawK8SClient client.Client, cachedK8SClient client.Client) (*IPAMContext, error) {
prometheusRegister()
c := &IPAMContext{}
c.rawK8SClient = rawK8SClient
c.cachedK8SClient = cachedK8SClient
c.networkClient = networkutils.New()
c.useCustomNetworking = UseCustomNetworkCfg()
c.enablePrefixDelegation = usePrefixDelegation()
c.enableIPv4 = isIPv4Enabled()
c.enableIPv6 = isIPv6Enabled()
c.disableENIProvisioning = disablingENIProvisioning()
client, err := awsutils.New(c.useCustomNetworking, c.disableENIProvisioning, c.enableIPv4, c.enableIPv6)
if err != nil {
return nil, errors.Wrap(err, "ipamd: can not initialize with AWS SDK interface")
}
c.awsClient = client
c.primaryIP = make(map[string]string)
c.reconcileCooldownCache.cache = make(map[string]time.Time)
//WARM and Min IP/Prefix targets are ignored in IPv6 mode
c.warmENITarget = getWarmENITarget()
c.warmIPTarget = getWarmIPTarget()
c.minimumIPTarget = getMinimumIPTarget()
c.warmPrefixTarget = getWarmPrefixTarget()
c.enablePodENI = enablePodENI()
c.enableManageUntaggedMode = enableManageUntaggedMode()
c.enablePodIPAnnotation = enablePodIPAnnotation()
err = c.awsClient.FetchInstanceTypeLimits()
if err != nil {
log.Errorf("Failed to get ENI limits from file:vpc_ip_limits or EC2 for %s", c.awsClient.GetInstanceType())
return nil, err
}
//Let's validate if the configured combination of env variables is supported before we
//proceed any further
if !c.isConfigValid() {
return nil, err
}
c.awsClient.InitCachedPrefixDelegation(c.enablePrefixDelegation)
c.myNodeName = os.Getenv(envNodeName)
checkpointer := datastore.NewJSONFile(dsBackingStorePath())
c.dataStore = datastore.NewDataStore(log, checkpointer, c.enablePrefixDelegation)
err = c.nodeInit()
if err != nil {
return nil, err
}
mac := c.awsClient.GetPrimaryENImac()
// retrieve security groups
if c.enableIPv4 || !c.disableENIProvisioning {
err = c.awsClient.RefreshSGIDs(mac)
if err != nil {
return nil, err
}
// Refresh security groups and VPC CIDR blocks in the background
// Ignoring errors since we will retry in 30s
go wait.Forever(func() { _ = c.awsClient.RefreshSGIDs(mac) }, 30*time.Second)
}
return c, nil
}
func (c *IPAMContext) nodeInit() error {
ipamdActionsInprogress.WithLabelValues("nodeInit").Add(float64(1))
defer ipamdActionsInprogress.WithLabelValues("nodeInit").Sub(float64(1))
var err error
var vpcV4CIDRs []string
ctx := context.TODO()
log.Debugf("Start node init")
primaryV4IP := c.awsClient.GetLocalIPv4()
err = c.initENIAndIPLimits()
if err != nil {
	return err
}
if c.enableIPv4 {
//Subnets currently will have both v4 and v6 CIDRs. Once EC2 launches v6 only Subnets, that will no longer
//be true and so it is safe (and only required) to get the v4 CIDR info only when IPv4 mode is enabled.
vpcV4CIDRs, err = c.awsClient.GetVPCIPv4CIDRs()
if err != nil {
return err
}
}
err = c.networkClient.SetupHostNetwork(vpcV4CIDRs, c.awsClient.GetPrimaryENImac(), &primaryV4IP, c.enablePodENI, c.enableIPv4,
c.enableIPv6)
if err != nil {
return errors.Wrap(err, "ipamd init: failed to set up host network")
}
metadataResult, err := c.awsClient.DescribeAllENIs()
if err != nil {
return errors.New("ipamd init: failed to retrieve attached ENIs info")
}
log.Debugf("DescribeAllENIs success: ENIs: %d, tagged: %d", len(metadataResult.ENIMetadata), len(metadataResult.TagMap))
c.awsClient.SetCNIUnmanagedENIs(metadataResult.MultiCardENIIDs)
c.setUnmanagedENIs(metadataResult.TagMap)
enis := c.filterUnmanagedENIs(metadataResult.ENIMetadata)
for _, eni := range enis {
log.Debugf("Discovered ENI %s, trying to set it up", eni.ENIID)
isTrunkENI := eni.ENIID == metadataResult.TrunkENI
isEFAENI := metadataResult.EFAENIs[eni.ENIID]
if !isTrunkENI && !c.disableENIProvisioning {
if err := c.awsClient.TagENI(eni.ENIID, metadataResult.TagMap[eni.ENIID]); err != nil {
return errors.Wrapf(err, "ipamd init: failed to tag managed ENI %v", eni.ENIID)
}
}
// Retry ENI sync
retry := 0
for {
retry++
if err = c.setupENI(eni.ENIID, eni, isTrunkENI, isEFAENI); err == nil {
log.Infof("ENI %s set up.", eni.ENIID)
break
}
if retry > maxRetryCheckENI {
log.Warnf("Reached max retry: Unable to discover attached IPs for ENI from metadata service (attempted %d/%d): %v", retry, maxRetryCheckENI, err)
ipamdErrInc("waitENIAttachedMaxRetryExceeded")
break
}
log.Warnf("Error trying to set up ENI %s: %v", eni.ENIID, err)
if strings.Contains(err.Error(), "setupENINetwork: failed to find the link which uses MAC address") {
// If we can't find the matching link for this MAC address, there is no point in retrying for this ENI.
log.Debug("Unable to match link for this ENI, going to the next one.")
break
}
log.Debugf("Unable to discover IPs for this ENI yet (attempt %d/%d)", retry, maxRetryCheckENI)
time.Sleep(eniAttachTime)
}
}
if err := c.dataStore.ReadBackingStore(c.enableIPv6); err != nil {
return err
}
if c.enableIPv6 {
//We will not support upgrading/converting an existing IPv4 cluster to operate in IPv6 mode. So, we will always
//start with a clean slate in IPv6 mode. We also don't have to deal with dynamic update of Prefix Delegation
//feature in IPv6 mode as we don't support (yet) a non-PD v6 option. In addition, we don't support custom
//networking & SGPP in IPv6 mode yet. So, we will skip the corresponding setup. Will save us from checking
//if IPv6 is enabled at multiple places. Once we start supporting these features in IPv6 mode, we can do away
//with this check and not change anything else in the below setup.
return nil
}
if c.enablePrefixDelegation {
//During upgrade, or if the prefix delegation knob is flipped from disabled to enabled, we
//might have secondary IPs attached to ENIs, so clean up any unused ones before moving on
c.tryUnassignIPsFromENIs()
} else {
//When the prefix delegation knob is flipped from enabled to disabled, we might
//have unused prefixes attached to the ENIs, so clean them up
c.tryUnassignPrefixesFromENIs()
}
if err = c.configureIPRulesForPods(); err != nil {
return err
}
// Spawning updateCIDRsRulesOnChange go-routine
go wait.Forever(func() {
vpcV4CIDRs = c.updateCIDRsRulesOnChange(vpcV4CIDRs)
}, 30*time.Second)
eniConfigName, err := eniconfig.GetNodeSpecificENIConfigName(ctx, c.cachedK8SClient)
if err == nil && c.useCustomNetworking && eniConfigName != "default" {
// Signal to VPC Resource Controller that the node is using custom networking
err := c.SetNodeLabel(ctx, vpcENIConfigLabel, eniConfigName)
if err != nil {
log.Errorf("Failed to set eniConfig node label", err)
podENIErrInc("nodeInit")
return err
}
} else {
// Remove the custom networking label
err := c.SetNodeLabel(ctx, vpcENIConfigLabel, "")
if err != nil {
log.Errorf("Failed to delete eniConfig node label", err)
podENIErrInc("nodeInit")
return err
}
}
if metadataResult.TrunkENI != "" {
// Signal to VPC Resource Controller that the node has a trunk already
err := c.SetNodeLabel(ctx, "vpc.amazonaws.com/has-trunk-attached", "true")
if err != nil {
log.Errorf("Failed to set node label", err)
podENIErrInc("nodeInit")
// If this fails, we probably can't talk to the API server. Let the pod restart
return err
}
} else {
// Check if we want to ask for one
c.askForTrunkENIIfNeeded(ctx)
}
if !c.disableENIProvisioning {
// For a new node, attach Cidrs (secondary ips/prefixes)
increasedPool, err := c.tryAssignCidrs()
if err == nil && increasedPool {
c.updateLastNodeIPPoolAction()
} else if err != nil {
if containsInsufficientCidrBlocksError(err) {
log.Errorf("Unable to attach IPs/Prefixes for the ENI, subnet doesn't seem to have enough IPs/Prefixes. Consider using new subnet or carve a reserved range using create-subnet-cidr-reservation")
c.lastInsufficientCidrError = time.Now()
return nil
}
return err
}
}
return nil
}
func (c *IPAMContext) configureIPRulesForPods() error {
rules, err := c.networkClient.GetRuleList()
if err != nil {
log.Errorf("During ipamd init: failed to retrieve IP rule list %v", err)
return nil
}
for _, info := range c.dataStore.AllocatedIPs() {
// TODO(gus): This should really be done via CNI CHECK calls, rather than in ipam (requires upstream k8s changes).
// Update ip rules in case there is a change in VPC CIDRs, AWS_VPC_K8S_CNI_EXTERNALSNAT setting
srcIPNet := net.IPNet{IP: net.ParseIP(info.IP), Mask: net.IPv4Mask(255, 255, 255, 255)}
err = c.networkClient.UpdateRuleListBySrc(rules, srcIPNet)
if err != nil {
log.Warnf("UpdateRuleListBySrc in nodeInit() failed for IP %s: %v", info.IP, err)
}
}
return nil
}
func (c *IPAMContext) updateCIDRsRulesOnChange(oldVPCCIDRs []string) []string {
newVPCCIDRs, err := c.awsClient.GetVPCIPv4CIDRs()
if err != nil {
log.Warnf("skipping periodic update to VPC CIDRs due to error: %v", err)
return oldVPCCIDRs
}
old := sets.NewString(oldVPCCIDRs...)
new := sets.NewString(newVPCCIDRs...)
if !old.Equal(new) {
primaryIP := c.awsClient.GetLocalIPv4()
err = c.networkClient.UpdateHostIptablesRules(newVPCCIDRs, c.awsClient.GetPrimaryENImac(), &primaryIP, c.enableIPv4,
c.enableIPv6)
if err != nil {
log.Warnf("unable to update host iptables rules for VPC CIDRs due to error: %v", err)
}
}
return newVPCCIDRs
}
func (c *IPAMContext) updateIPStats(unmanaged int) {
ipMax.Set(float64(c.maxIPsPerENI * (c.maxENI - unmanaged)))
enisMax.Set(float64(c.maxENI - unmanaged))
}
// StartNodeIPPoolManager monitors the IP pool, add or del them when it is required.
func (c *IPAMContext) StartNodeIPPoolManager() {
if c.enableIPv6 {
//Nothing to do in IPv6 Mode. IPv6 is only supported in Prefix delegation mode
//and VPC CNI will only attach one V6 Prefix.
return
}
sleepDuration := ipPoolMonitorInterval / 2
ctx := context.Background()
for {
if !c.disableENIProvisioning {
time.Sleep(sleepDuration)
c.updateIPPoolIfRequired(ctx)
}
time.Sleep(sleepDuration)
c.nodeIPPoolReconcile(ctx, nodeIPPoolReconcileInterval)
}
}
func (c *IPAMContext) updateIPPoolIfRequired(ctx context.Context) {
c.askForTrunkENIIfNeeded(ctx)
if c.isDatastorePoolTooLow() {
c.increaseDatastorePool(ctx)
} else if c.isDatastorePoolTooHigh() {
c.decreaseDatastorePool(decreaseIPPoolInterval)
}
if c.shouldRemoveExtraENIs() {
c.tryFreeENI()
}
}
// decreaseDatastorePool runs every `interval` and attempts to return unused ENIs and IPs
func (c *IPAMContext) decreaseDatastorePool(interval time.Duration) {
ipamdActionsInprogress.WithLabelValues("decreaseDatastorePool").Add(float64(1))
defer ipamdActionsInprogress.WithLabelValues("decreaseDatastorePool").Sub(float64(1))
now := time.Now()
timeSinceLast := now.Sub(c.lastDecreaseIPPool)
if timeSinceLast <= interval {
log.Debugf("Skipping decrease Datastore pool because time since last %v <= %v", timeSinceLast, interval)
return
}
log.Debugf("Starting to decrease Datastore pool")
c.tryUnassignCidrsFromAll()
c.lastDecreaseIPPool = now
c.lastNodeIPPoolAction = now
total, used, _, cooldownIPs := c.dataStore.GetStats(ipV4AddrFamily)
log.Debugf("Successfully decreased IP pool")
logPoolStats(total, used, cooldownIPs, c.maxIPsPerENI, c.enablePrefixDelegation)
}
// tryFreeENI always tries to free one ENI
func (c *IPAMContext) tryFreeENI() {
if c.isTerminating() {
log.Debug("AWS CNI is terminating, not detaching any ENIs")
return
}
eni := c.dataStore.RemoveUnusedENIFromStore(c.warmIPTarget, c.minimumIPTarget, c.warmPrefixTarget)
if eni == "" {
return
}
log.Debugf("Start freeing ENI %s", eni)
err := c.awsClient.FreeENI(eni)
if err != nil {
ipamdErrInc("decreaseIPPoolFreeENIFailed")
log.Errorf("Failed to free ENI %s, err: %v", eni, err)
return
}
}
// tryUnassignCidrsFromAll determines if there are IPs to free when we have extra IPs beyond the
// target; when a warm IP target is defined, it deallocates the extra IP addresses
func (c *IPAMContext) tryUnassignCidrsFromAll() {
_, over, warmTargetDefined := c.datastoreTargetState()
//WARM IP targets not defined then check if WARM_PREFIX_TARGET is defined.
if !warmTargetDefined {
over = c.computeExtraPrefixesOverWarmTarget()
}
if over > 0 {
eniInfos := c.dataStore.GetENIInfos()
for eniID := range eniInfos.ENIs {
//Either returns prefixes or IPs [Cidrs]
cidrs := c.dataStore.FindFreeableCidrs(eniID)
if cidrs == nil {
log.Errorf("Error finding unassigned IPs for ENI %s", eniID)
return
}
// Free the number of Cidrs `over` the warm IP target, unless `over` is greater than the number of available Cidrs on
// this ENI. In that case we should only free the number of available Cidrs.
numFreeable := min(over, len(cidrs))
cidrs = cidrs[:numFreeable]
if len(cidrs) == 0 {
continue
}
// Delete IPs from datastore
var deletedCidrs []datastore.CidrInfo
for _, toDelete := range cidrs {
// Don't force the delete, since a freeable Cidrs might have been assigned to a pod
// before we get around to deleting it.
err := c.dataStore.DelIPv4CidrFromStore(eniID, toDelete.Cidr, false /* force */)
if err != nil {
log.Warnf("Failed to delete Cidr %s on ENI %s from datastore: %s", toDelete, eniID, err)
ipamdErrInc("decreaseIPPool")
continue
} else {
deletedCidrs = append(deletedCidrs, toDelete)
}
}
// Deallocate Cidrs from the instance if they aren't used by pods.
c.DeallocCidrs(eniID, deletedCidrs)
}
}
}
func (c *IPAMContext) increaseDatastorePool(ctx context.Context) {
log.Debug("Starting to increase pool size")
ipamdActionsInprogress.WithLabelValues("increaseDatastorePool").Add(float64(1))
defer ipamdActionsInprogress.WithLabelValues("increaseDatastorePool").Sub(float64(1))
short, _, warmIPTargetDefined := c.datastoreTargetState()
if warmIPTargetDefined && short == 0 {
log.Debugf("Skipping increase Datastore pool, warm target reached")
return
}
if !warmIPTargetDefined {
shortPrefix, warmTargetDefined := c.datastorePrefixTargetState()
if warmTargetDefined && shortPrefix == 0 {
log.Debugf("Skipping increase Datastore pool, warm prefix target reached")
return
}
}
if c.isTerminating() {
log.Debug("AWS CNI is terminating, will not try to attach any new IPs or ENIs right now")
return
}
// Try to add more Cidrs to existing ENIs first.
if c.inInsufficientCidrCoolingPeriod() {
log.Debugf("Recently we had InsufficientCidr error hence will wait for %v before retrying", insufficientCidrErrorCooldown)
return
}
increasedPool, err := c.tryAssignCidrs()
if err != nil {
log.Errorf(err.Error())
if containsInsufficientCidrBlocksError(err) {
log.Errorf("Unable to attach IPs/Prefixes for the ENI, subnet doesn't seem to have enough IPs/Prefixes. Consider using new subnet or carve a reserved range using create-subnet-cidr-reservation")
c.lastInsufficientCidrError = time.Now()
return
}
}
if increasedPool {
c.updateLastNodeIPPoolAction()
} else {
// Check if we need to make room for the VPC Resource Controller to attach a trunk ENI
reserveSlotForTrunkENI := 0
if c.enablePodENI && c.dataStore.GetTrunkENI() == "" {
reserveSlotForTrunkENI = 1
}
// If we did not add an IP, try to add an ENI instead.
if c.dataStore.GetENIs() < (c.maxENI - c.unmanagedENI - reserveSlotForTrunkENI) {
if err = c.tryAllocateENI(ctx); err == nil {
c.updateLastNodeIPPoolAction()
}
} else {
log.Debugf("Skipping ENI allocation as the max ENI limit of %d is already reached (accounting for %d unmanaged ENIs and %d trunk ENIs)",
c.maxENI, c.unmanagedENI, reserveSlotForTrunkENI)
}
}
}
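// updateLastNodeIPPoolAction records the time of the last pool change and logs the updated pool stats.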
func (c *IPAMContext) updateLastNodeIPPoolAction() {
c.lastNodeIPPoolAction = time.Now()
total, used, totalPrefix, cooldownIPs := c.dataStore.GetStats(ipV4AddrFamily)
if !c.enablePrefixDelegation {
log.Debugf("Successfully increased IP pool, total: %d, used: %d", total, used)
} else {
log.Debugf("Successfully increased Prefix pool, total: %d, used: %d", totalPrefix, used)
}
logPoolStats(total, used, cooldownIPs, c.maxIPsPerENI, c.enablePrefixDelegation)
}
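// tryAllocateENI allocates a new ENI (honoring custom networking configuration when enabled),
// assigns addresses to it, and registers it in the datastore.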
func (c *IPAMContext) tryAllocateENI(ctx context.Context) error {
var securityGroups []*string
var subnet string
if c.useCustomNetworking {
eniCfg, err := eniconfig.MyENIConfig(ctx, c.cachedK8SClient)
if err != nil {
log.Errorf("Failed to get pod ENI config")
return err
}
log.Infof("ipamd: using custom network config: %v, %s", eniCfg.SecurityGroups, eniCfg.Subnet)
for _, sgID := range eniCfg.SecurityGroups {
log.Debugf("Found security-group id: %s", sgID)
securityGroups = append(securityGroups, aws.String(sgID))
}
subnet = eniCfg.Subnet
}
eni, err := c.awsClient.AllocENI(c.useCustomNetworking, securityGroups, subnet)
if err != nil {
log.Errorf("Failed to increase pool size due to not able to allocate ENI %v", err)
ipamdErrInc("increaseIPPoolAllocENI")
return err
}
resourcesToAllocate := c.GetENIResourcesToAllocate()
err = c.awsClient.AllocIPAddresses(eni, resourcesToAllocate)
if err != nil {
log.Warnf("Failed to allocate %d IP addresses on an ENI: %v", resourcesToAllocate, err)
// Continue to process the allocated IP addresses
ipamdErrInc("increaseIPPoolAllocIPAddressesFailed")
if containsInsufficientCidrBlocksError(err) {
log.Errorf("Unable to attach IPs/Prefixes for the ENI, subnet doesn't seem to have enough IPs/Prefixes. Consider using new subnet or carve a reserved range using create-subnet-cidr-reservation")
c.lastInsufficientCidrError = time.Now()
return err
}
}
eniMetadata, err := c.awsClient.WaitForENIAndIPsAttached(eni, resourcesToAllocate)
if err != nil {
ipamdErrInc("increaseIPPoolwaitENIAttachedFailed")
log.Errorf("Failed to increase pool size: Unable to discover attached ENI from metadata service %v", err)
return err
}
// The CNI does not create trunk or EFA ENIs, so they will always be false here
err = c.setupENI(eni, eniMetadata, false, false)
if err != nil {
ipamdErrInc("increaseIPPoolsetupENIFailed")
log.Errorf("Failed to increase pool size: %v", err)
return err
}
return err
}
// tryAssignCidrs tries to fill in missing IPs on an existing ENI when PD is disabled,
// or missing Prefixes on an existing ENI when PD is enabled.
func (c *IPAMContext) tryAssignCidrs() (increasedPool bool, err error) {
short, _, warmIPTargetDefined := c.datastoreTargetState()
if warmIPTargetDefined && short == 0 {
log.Infof("Warm IP target set and short is 0 so not assigning Cidrs (IPs or Prefixes)")
return false, nil
}
if !warmIPTargetDefined {
shortPrefix, warmTargetDefined := c.datastorePrefixTargetState()
if warmTargetDefined && shortPrefix == 0 {
log.Infof("Warm prefix target set and short is 0 so not assigning Cidrs (Prefixes)")
return false, nil
}
}
if !c.enablePrefixDelegation {
return c.tryAssignIPs()
} else {
return c.tryAssignPrefixes()
}
}
// For an ENI, try to fill in missing IPs on an existing ENI
func (c *IPAMContext) tryAssignIPs() (increasedPool bool, err error) {
// If WARM_IP_TARGET is set, only proceed if we are short of target
short, _, warmIPTargetDefined := c.datastoreTargetState()
if warmIPTargetDefined && short == 0 {
return false, nil
}
// If WARM_IP_TARGET is set we only want to allocate up to that target
// to avoid overallocating and releasing
toAllocate := c.maxIPsPerENI
if warmIPTargetDefined {
toAllocate = short
}
// Find an ENI where we can add more IPs
eni := c.dataStore.GetENINeedsIP(c.maxIPsPerENI, c.useCustomNetworking)
if eni != nil && len(eni.AvailableIPv4Cidrs) < c.maxIPsPerENI {
currentNumberOfAllocatedIPs := len(eni.AvailableIPv4Cidrs)
// Try to allocate all available IPs for this ENI
err = c.awsClient.AllocIPAddresses(eni.ID, int(math.Min(float64(c.maxIPsPerENI-currentNumberOfAllocatedIPs), float64(toAllocate))))
if err != nil {
log.Warnf("failed to allocate all available IP addresses on ENI %s, err: %v", eni.ID, err)
// Try to just get one more IP
err = c.awsClient.AllocIPAddresses(eni.ID, 1)
if err != nil {
ipamdErrInc("increaseIPPoolAllocIPAddressesFailed")
return false, errors.Wrap(err, fmt.Sprintf("failed to allocate one IP addresses on ENI %s, err: %v", eni.ID, err))
}
}
// This call to EC2 is needed to verify which IPs got attached to this ENI.
ec2Addrs, err := c.awsClient.GetIPv4sFromEC2(eni.ID)
if err != nil {
ipamdErrInc("increaseIPPoolGetENIaddressesFailed")
return true, errors.Wrap(err, "failed to get ENI IP addresses during IP allocation")
}
c.addENIsecondaryIPsToDataStore(ec2Addrs, eni.ID)
return true, nil
}
return false, nil
}
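// assignIPv6Prefix ensures a single IPv6 prefix is attached to the given ENI and recorded in the datastore.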
func (c *IPAMContext) assignIPv6Prefix(eniID string) (err error) {
log.Debugf("Assigning an IPv6Prefix for ENI: %s", eniID)
//Let's make an EC2 API call to get a list of IPv6 prefixes (if any) that are already attached to the
//current ENI. We will make this call only once during boot up/init and doing so will shield us from any
//IMDS out of sync issues. We only need one v6 prefix per ENI/Node.
ec2v6Prefixes, err := c.awsClient.GetIPv6PrefixesFromEC2(eniID)
if err != nil {
log.Errorf("assignIPv6Prefix; err: %s", err)
return err
}
log.Debugf("ENI %s has %v prefixe(s) attached", eniID, len(ec2v6Prefixes))
//Note: If we find more than one v6 prefix attached to the ENI, VPC CNI will not attempt to free it. VPC CNI
//will only attach a single v6 prefix and it will not attempt to free the additional Prefixes.
//We will add all the prefixes to our datastore. TODO - Should we instead pick one of them. If we do, how to track
//that across restarts?
//Check if we already have v6 Prefix(es) attached
if len(ec2v6Prefixes) == 0 {
//Allocate and attach a v6 Prefix to Primary ENI
log.Debugf("No IPv6 Prefix(es) found for ENI: %s", eniID)
strPrefixes, err := c.awsClient.AllocIPv6Prefixes(eniID)
if err != nil {
return err
}
for _, v6Prefix := range strPrefixes {
ec2v6Prefixes = append(ec2v6Prefixes, &ec2.Ipv6PrefixSpecification{Ipv6Prefix: v6Prefix})
}
log.Debugf("Successfully allocated an IPv6Prefix for ENI: %s", eniID)
} else if len(ec2v6Prefixes) > 1 {
//Found more than one v6 prefix attached to the ENI. VPC CNI will only attach a single v6 prefix
//and it will not attempt to free any additional Prefixes that are already attached.
//Will use the first IPv6 Prefix attached for IP address allocation.
ec2v6Prefixes = []*ec2.Ipv6PrefixSpecification{ec2v6Prefixes[0]}
}
c.addENIv6prefixesToDataStore(ec2v6Prefixes, eniID)
return nil
}
func (c *IPAMContext) tryAssignPrefixes() (increasedPool bool, err error) {
toAllocate := c.getPrefixesNeeded()
// Returns an ENI which has space for more prefixes to be attached, but this
// ENI might not satisfy the WARM_IP_TARGET/WARM_PREFIX_TARGET
eni := c.dataStore.GetENINeedsIP(c.maxPrefixesPerENI, c.useCustomNetworking)
if eni != nil {
currentNumberOfAllocatedPrefixes := len(eni.AvailableIPv4Cidrs)
err = c.awsClient.AllocIPAddresses(eni.ID, min((c.maxPrefixesPerENI-currentNumberOfAllocatedPrefixes), toAllocate))
if err != nil {
log.Warnf("failed to allocate all available IPv4 Prefixes on ENI %s, err: %v", eni.ID, err)
// Try to just get one more prefix
err = c.awsClient.AllocIPAddresses(eni.ID, 1)
if err != nil {
ipamdErrInc("increaseIPPoolAllocIPAddressesFailed")
return false, errors.Wrap(err, fmt.Sprintf("failed to allocate one IPv4 prefix on ENI %s, err: %v", eni.ID, err))
}
}
ec2Prefixes, err := c.awsClient.GetIPv4PrefixesFromEC2(eni.ID)
if err != nil {
ipamdErrInc("increaseIPPoolGetENIprefixedFailed")
return true, errors.Wrap(err, "failed to get ENI Prefix addresses during IPv4 Prefix allocation")
}
c.addENIv4prefixesToDataStore(ec2Prefixes, eni.ID)
return true, nil
}
return false, nil
}
// setupENI does following:
// 1) add ENI to datastore
// 2) set up linux ENI related networking stack.
// 3) add all ENI's secondary IP addresses to datastore
func (c *IPAMContext) setupENI(eni string, eniMetadata awsutils.ENIMetadata, isTrunkENI, isEFAENI bool) error {
primaryENI := c.awsClient.GetPrimaryENI()
// Add the ENI to the datastore
err := c.dataStore.AddENI(eni, eniMetadata.DeviceNumber, eni == primaryENI, isTrunkENI, isEFAENI)
if err != nil && err.Error() != datastore.DuplicatedENIError {
return errors.Wrapf(err, "failed to add ENI %s to data store", eni)
}
// Store the primary IP of the ENI
c.primaryIP[eni] = eniMetadata.PrimaryIPv4Address()
if c.enableIPv6 && eni == primaryENI {
//In v6 PD Mode, VPC CNI will only manage primary ENI. Once we start supporting secondary IP and custom
//networking modes for v6, we will relax this restriction. We filter out all the ENIs except Primary ENI
//in v6 mode (prior to landing here), but included the primary ENI check as a safety net.
err := c.assignIPv6Prefix(eni)
if err != nil {
return errors.Wrapf(err, "Failed to allocate IPv6 Prefixes to Primary ENI")
}
} else {
// For secondary ENIs, set up the network
if eni != primaryENI {
err = c.networkClient.SetupENINetwork(c.primaryIP[eni], eniMetadata.MAC, eniMetadata.DeviceNumber, eniMetadata.SubnetIPv4CIDR)
if err != nil {
// Failed to set up the ENI
errRemove := c.dataStore.RemoveENIFromDataStore(eni, true)
if errRemove != nil {
log.Warnf("failed to remove ENI %s: %v", eni, errRemove)
}
delete(c.primaryIP, eni)
return errors.Wrapf(err, "failed to set up ENI %s network", eni)
}
}
log.Infof("Found ENIs having %d secondary IPs and %d Prefixes", len(eniMetadata.IPv4Addresses), len(eniMetadata.IPv4Prefixes))
//Either case add the IPs and prefixes to datastore.
c.addENIsecondaryIPsToDataStore(eniMetadata.IPv4Addresses, eni)
c.addENIv4prefixesToDataStore(eniMetadata.IPv4Prefixes, eni)
}
return nil
}
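// addENIsecondaryIPsToDataStore adds the given secondary IPs of an ENI to the datastore, skipping the ENI's primary IP.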
func (c *IPAMContext) addENIsecondaryIPsToDataStore(ec2PrivateIpAddrs []*ec2.NetworkInterfacePrivateIpAddress, eni string) {
//Add all the secondary IPs
for _, ec2PrivateIpAddr := range ec2PrivateIpAddrs {
if aws.BoolValue(ec2PrivateIpAddr.Primary) {
continue
}
cidr := net.IPNet{IP: net.ParseIP(aws.StringValue(ec2PrivateIpAddr.PrivateIpAddress)), Mask: net.IPv4Mask(255, 255, 255, 255)}
err := c.dataStore.AddIPv4CidrToStore(eni, cidr, false)
if err != nil && err.Error() != datastore.IPAlreadyInStoreError {
log.Warnf("Failed to increase IP pool, failed to add IP %s to data store", ec2PrivateIpAddr.PrivateIpAddress)
// continue to add next address
ipamdErrInc("addENIsecondaryIPsToDataStoreFailed")
}
}
total, assigned, totalPrefix, cooldownIPs := c.dataStore.GetStats(ipV4AddrFamily)
log.Debugf("Datastore Pool stats: total(/32): %d, assigned(/32): %d, cooldownIPs(/32): %d, total prefixes(/28): %d", total, assigned, cooldownIPs, totalPrefix)
}
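// addENIv4prefixesToDataStore adds the given IPv4 prefixes of an ENI to the datastore.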
func (c *IPAMContext) addENIv4prefixesToDataStore(ec2PrefixAddrs []*ec2.Ipv4PrefixSpecification, eni string) {
//Walk thru all prefixes
for _, ec2PrefixAddr := range ec2PrefixAddrs {
strIpv4Prefix := aws.StringValue(ec2PrefixAddr.Ipv4Prefix)
_, ipnet, err := net.ParseCIDR(strIpv4Prefix)
if err != nil {
//Parsing failed, get next prefix
log.Debugf("Parsing failed, moving on to next prefix")
continue
}
cidr := *ipnet
err = c.dataStore.AddIPv4CidrToStore(eni, cidr, true)
if err != nil && err.Error() != datastore.IPAlreadyInStoreError {
log.Warnf("Failed to increase Prefix pool, failed to add Prefix %s to data store", ec2PrefixAddr.Ipv4Prefix)
// continue to add next address
ipamdErrInc("addENIv4prefixesToDataStoreFailed")
}
}
total, assigned, totalPrefix, cooldownIPs := c.dataStore.GetStats(ipV4AddrFamily)
log.Debugf("Datastore Pool stats: total(/32): %d, assigned(/32): %d, cooldownIPs(/32): %d, total prefixes(/28): %d", total, assigned, cooldownIPs, totalPrefix)
}
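// addENIv6prefixesToDataStore adds the given IPv6 prefixes of an ENI to the datastore.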
func (c *IPAMContext) addENIv6prefixesToDataStore(ec2PrefixAddrs []*ec2.Ipv6PrefixSpecification, eni string) {
log.Debugf("Updating datastore with IPv6Prefix(es) for ENI: %v, count: %v", eni, len(ec2PrefixAddrs))
//Walk through all prefixes
for _, ec2PrefixAddr := range ec2PrefixAddrs {
strIpv6Prefix := aws.StringValue(ec2PrefixAddr.Ipv6Prefix)
_, ipnet, err := net.ParseCIDR(strIpv6Prefix)
if err != nil {
//Parsing failed, get next prefix
log.Debugf("Parsing failed, moving on to next prefix")
continue
}
cidr := *ipnet
err = c.dataStore.AddIPv6CidrToStore(eni, cidr, true)
if err != nil && err.Error() != datastore.IPAlreadyInStoreError {
log.Warnf("Failed to increase Prefix pool, failed to add Prefix %s to data store", ec2PrefixAddr.Ipv6Prefix)
// continue to add next address
ipamdErrInc("addENIv6prefixesToDataStoreFailed")
}
}
_, _, totalPrefix, _ := c.dataStore.GetStats(ipV6AddrFamily)
log.Debugf("Datastore Pool stats: total v6 prefixes(/80): %d", totalPrefix)
}
// getMaxENI returns the maximum number of ENIs to attach to this instance. This is calculated as the lesser of
// the limit for the instance type and the value configured via the MAX_ENI environment variable. If the value of
// the environment variable is 0 or less, it will be ignored and the maximum for the instance is returned.
func (c *IPAMContext) getMaxENI() (int, error) {
instanceMaxENI := c.awsClient.GetENILimit()
inputStr, found := os.LookupEnv(envMaxENI)
envMax := defaultMaxENI
if found {
if input, err := strconv.Atoi(inputStr); err == nil && input >= 1 {
log.Debugf("Using MAX_ENI %v", input)
envMax = input
}
}
if envMax >= 1 && envMax < instanceMaxENI {
return envMax, nil
}
return instanceMaxENI, nil
}
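// getWarmENITarget reads WARM_ENI_TARGET from the environment, falling back to the default for missing or negative values.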
func getWarmENITarget() int {
inputStr, found := os.LookupEnv(envWarmENITarget)
if !found {
return defaultWarmENITarget
}
if input, err := strconv.Atoi(inputStr); err == nil {
if input < 0 {
return defaultWarmENITarget
}
log.Debugf("Using WARM_ENI_TARGET %v", input)
return input
}
return defaultWarmENITarget
}
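// getWarmPrefixTarget reads WARM_PREFIX_TARGET from the environment, falling back to the default for missing or negative values.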
func getWarmPrefixTarget() int {
inputStr, found := os.LookupEnv(envWarmPrefixTarget)
if !found {
return defaultWarmPrefixTarget
}
if input, err := strconv.Atoi(inputStr); err == nil {
if input < 0 {
return defaultWarmPrefixTarget
}
log.Debugf("Using WARM_PREFIX_TARGET %v", input)
return input
}
return defaultWarmPrefixTarget
}
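// logPoolStats logs IP pool or prefix pool statistics, depending on whether prefix delegation is enabled.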
func logPoolStats(total int, used int, cooldownIPs int, maxAddrsPerENI int, ipv4PrefixDelegation bool) {
if !ipv4PrefixDelegation {
log.Debugf("IP pool stats: total = %d, used = %d, IPs in Cooldown = %d, maxAddrsPerENI = %d", total, used, cooldownIPs, maxAddrsPerENI)
} else {
log.Debugf("Prefix pool stats: total = %d, used = %d, IPs in Cooldown = %d, maxAddrsPerENI = %d", total, used, cooldownIPs, maxAddrsPerENI)
}
}
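// askForTrunkENIIfNeeded labels the node so that the VPC Resource Controller attaches a trunk ENI,
// provided pod ENIs are enabled, no trunk is attached yet, and an ENI slot is free.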
func (c *IPAMContext) askForTrunkENIIfNeeded(ctx context.Context) {
if c.enablePodENI && c.dataStore.GetTrunkENI() == "" {
// Check that there is room for a trunk ENI to be attached:
if c.dataStore.GetENIs() >= (c.maxENI - c.unmanagedENI) {
log.Debug("No slot available for a trunk ENI to be attached. Not labeling the node")
return
}
// We need to signal that VPC Resource Controller needs to attach a trunk ENI
err := c.SetNodeLabel(ctx, "vpc.amazonaws.com/has-trunk-attached", "false")
if err != nil {
podENIErrInc("askForTrunkENIIfNeeded")
log.Errorf("Failed to set node label", err)
}
}
}
// shouldRemoveExtraENIs returns true if we should attempt to find an ENI to free. When WARM_IP_TARGET is set, we
// always check and do verification in getDeletableENI().
// With PD enabled: if the WARM_PREFIX_TARGET is spread across ENIs and we have more prefixes than needed, this
// function returns true. Even when the extra prefixes sit on a single ENI, it still returns true so that
// getDeletableENI() can recheck whether the ENI is needed for the prefix target.
func (c *IPAMContext) shouldRemoveExtraENIs() bool {
_, _, warmTargetDefined := c.datastoreTargetState()
if warmTargetDefined {
return true
}
total, used, _, cooldownIPs := c.dataStore.GetStats(ipV4AddrFamily)
available := total - used
var shouldRemoveExtra bool
// We need the +1 to make sure we are not going below the WARM_ENI_TARGET/WARM_PREFIX_TARGET
warmTarget := (c.warmENITarget + 1)
if c.enablePrefixDelegation {
warmTarget = (c.warmPrefixTarget + 1)
}
shouldRemoveExtra = available >= (warmTarget)*c.maxIPsPerENI
if shouldRemoveExtra {
logPoolStats(total, used, cooldownIPs, c.maxIPsPerENI, c.enablePrefixDelegation)
log.Debugf("It might be possible to remove extra ENIs because available (%d) >= (ENI/Prefix target + 1 (%d) + 1) * addrsPerENI (%d)", available, warmTarget, c.maxIPsPerENI)
} else if c.enablePrefixDelegation {
// When the prefix target count is reduced, the datastore-too-high handler would have deleted extra prefixes over the warm prefix target.
// Hence available will be less than warmTarget*c.maxIPsPerENI, but there can still be unused ENIs, so check whether any can be cleaned up.
shouldRemoveExtra = c.dataStore.CheckFreeableENIexists()
}
return shouldRemoveExtra
}
func (c *IPAMContext) computeExtraPrefixesOverWarmTarget() int {
over := 0
if !c.warmPrefixTargetDefined() {
return over
}
total, used, _, cooldownIPs := c.dataStore.GetStats(ipV4AddrFamily)
available := total - used
freePrefixes := c.dataStore.GetFreePrefixes()
over = max(freePrefixes-c.warmPrefixTarget, 0)
logPoolStats(total, used, cooldownIPs, c.maxIPsPerENI, c.enablePrefixDelegation)
log.Debugf("computeExtraPrefixesOverWarmTarget available %d over %d warm_prefix_target %d", available, over, c.warmPrefixTarget)
return over
}
func ipamdErrInc(fn string) {
ipamdErr.With(prometheus.Labels{"fn": fn}).Inc()
}
func podENIErrInc(fn string) {
podENIErr.With(prometheus.Labels{"fn": fn}).Inc()
}
// nodeIPPoolReconcile reconcile ENI and IP info from metadata service and IP addresses in datastore
func (c *IPAMContext) nodeIPPoolReconcile(ctx context.Context, interval time.Duration) {
curTime := time.Now()
timeSinceLast := curTime.Sub(c.lastNodeIPPoolAction)
if timeSinceLast <= interval {
return
}
ipamdActionsInprogress.WithLabelValues("nodeIPPoolReconcile").Add(float64(1))
defer ipamdActionsInprogress.WithLabelValues("nodeIPPoolReconcile").Sub(float64(1))
log.Debugf("Reconciling ENI/IP pool info because time since last %v <= %v", timeSinceLast, interval)
allENIs, err := c.awsClient.GetAttachedENIs()
if err != nil {
log.Errorf("IP pool reconcile: Failed to get attached ENI info: %v", err.Error())
ipamdErrInc("reconcileFailedGetENIs")
return
}
// We must always have at least the primary ENI of the instance
if allENIs == nil {
log.Error("IP pool reconcile: No ENI found at all in metadata, unable to reconcile")
ipamdErrInc("reconcileFailedGetENIs")
return
}
attachedENIs := c.filterUnmanagedENIs(allENIs)
currentENIs := c.dataStore.GetENIInfos().ENIs
trunkENI := c.dataStore.GetTrunkENI()
// Initialize the set with the known EFA interfaces
efaENIs := c.dataStore.GetEFAENIs()
// Check if a new ENI was added, if so we need to update the tags.
needToUpdateTags := false
for _, attachedENI := range attachedENIs {
if _, ok := currentENIs[attachedENI.ENIID]; !ok {
needToUpdateTags = true
break
}
}
var eniTagMap map[string]awsutils.TagMap
if needToUpdateTags {
log.Debugf("A new ENI added but not by ipamd, updating tags by calling EC2")
metadataResult, err := c.awsClient.DescribeAllENIs()
if err != nil {
log.Warnf("Failed to call EC2 to describe ENIs, aborting reconcile: %v", err)
return
}
if c.enablePodENI && metadataResult.TrunkENI != "" {
// Label the node that we have a trunk
err = c.SetNodeLabel(ctx, "vpc.amazonaws.com/has-trunk-attached", "true")
if err != nil {
podENIErrInc("askForTrunkENIIfNeeded")
log.Errorf("Failed to set node label for trunk. Aborting reconcile", err)
return
}
}
// Update trunk ENI
trunkENI = metadataResult.TrunkENI
// Just copy values of the EFA set
efaENIs = metadataResult.EFAENIs
eniTagMap = metadataResult.TagMap
c.setUnmanagedENIs(metadataResult.TagMap)
c.awsClient.SetCNIUnmanagedENIs(metadataResult.MultiCardENIIDs)
attachedENIs = c.filterUnmanagedENIs(metadataResult.ENIMetadata)
}
// Mark phase
for _, attachedENI := range attachedENIs {
eniIPPool, eniPrefixPool, err := c.dataStore.GetENICIDRs(attachedENI.ENIID)
if err == nil {
// If the attached ENI is in the data store
log.Debugf("Reconcile existing ENI %s IP pool", attachedENI.ENIID)
// Reconcile IP pool
c.eniIPPoolReconcile(eniIPPool, attachedENI, attachedENI.ENIID)
// Reconcile the prefix pool for the same ENI
log.Debugf("Reconcile existing ENI %s IP prefixes", attachedENI.ENIID)
c.eniPrefixPoolReconcile(eniPrefixPool, attachedENI, attachedENI.ENIID)
// Mark action, remove this ENI from currentENIs map
delete(currentENIs, attachedENI.ENIID)
continue
}
isTrunkENI := attachedENI.ENIID == trunkENI
isEFAENI := efaENIs[attachedENI.ENIID]
if !isTrunkENI && !c.disableENIProvisioning {
if err := c.awsClient.TagENI(attachedENI.ENIID, eniTagMap[attachedENI.ENIID]); err != nil {
log.Errorf("IP pool reconcile: failed to tag managed ENI %v: %v", attachedENI.ENIID, err)
ipamdErrInc("eniReconcileAdd")
continue
}
}
// Add new ENI
log.Debugf("Reconcile and add a new ENI %s", attachedENI)
err = c.setupENI(attachedENI.ENIID, attachedENI, isTrunkENI, isEFAENI)
if err != nil {
log.Errorf("IP pool reconcile: Failed to set up ENI %s network: %v", attachedENI.ENIID, err)
ipamdErrInc("eniReconcileAdd")
// Continue if we have trouble with only one ENI, instead of bailing out here.
continue
}
reconcileCnt.With(prometheus.Labels{"fn": "eniReconcileAdd"}).Inc()
}
// Sweep phase: since the marked ENIs have been removed, the remaining ones need to be swept
for eni := range currentENIs {
log.Infof("Reconcile and delete detached ENI %s", eni)
// Force the delete, since aws local metadata has told us that this ENI is no longer
// attached, so any IPs assigned from this ENI will no longer work.
err = c.dataStore.RemoveENIFromDataStore(eni, true /* force */)
if err != nil {
log.Errorf("IP pool reconcile: Failed to delete ENI during reconcile: %v", err)
ipamdErrInc("eniReconcileDel")
continue
}
delete(c.primaryIP, eni)
reconcileCnt.With(prometheus.Labels{"fn": "eniReconcileDel"}).Inc()
}
log.Debug("Successfully Reconciled ENI/IP pool")
total, assigned, totalPrefix, cooldownIPs := c.dataStore.GetStats(ipV4AddrFamily)
log.Debugf("IP/Prefix Address Pool stats: total: %d, assigned: %d, cooldownIPs: %d, total prefixes: %d", total, assigned, cooldownIPs, totalPrefix)
c.lastNodeIPPoolAction = curTime
}
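// eniIPPoolReconcile reconciles the secondary IPs of one ENI between the datastore and instance
// metadata, falling back to the EC2 API when metadata looks stale.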
func (c *IPAMContext) eniIPPoolReconcile(ipPool []string, attachedENI awsutils.ENIMetadata, eni string) {
attachedENIIPs := attachedENI.IPv4Addresses
needEC2Reconcile := true
// Here we can't trust attachedENI since the IMDS metadata can be stale. We need to check with EC2 API.
// +1 is for the primary IP of the ENI that is not added to the ipPool and not available for pods to use.
if 1+len(ipPool) != len(attachedENIIPs) {
log.Warnf("Instance metadata does not match data store! ipPool: %v, metadata: %v", ipPool, attachedENIIPs)
log.Debugf("We need to check the ENI status by calling the EC2 control plane.")
// Call EC2 to verify IPs on this ENI
ec2Addresses, err := c.awsClient.GetIPv4sFromEC2(eni)
if err != nil {
log.Errorf("Failed to fetch ENI IP addresses! Aborting reconcile of ENI %s", eni)
return
}
attachedENIIPs = ec2Addresses
needEC2Reconcile = false
}
// Add all known attached IPs to the datastore
seenIPs := c.verifyAndAddIPsToDatastore(eni, attachedENIIPs, needEC2Reconcile)
// Sweep phase, delete remaining IPs since they should not remain in the datastore
for _, existingIP := range ipPool {
if seenIPs[existingIP] {
continue
}
log.Debugf("Reconcile and delete IP %s on ENI %s", existingIP, eni)
// Force the delete, since we have verified with EC2 that these secondary IPs are no longer assigned to this ENI
ipv4Addr := net.IPNet{IP: net.ParseIP(existingIP), Mask: net.IPv4Mask(255, 255, 255, 255)}
err := c.dataStore.DelIPv4CidrFromStore(eni, ipv4Addr, true /* force */)
if err != nil {
log.Errorf("Failed to reconcile and delete IP %s on ENI %s, %v", existingIP, eni, err)
ipamdErrInc("ipReconcileDel")
// continue instead of bailout due to one ip
continue
}
reconcileCnt.With(prometheus.Labels{"fn": "eniIPPoolReconcileDel"}).Inc()
}
}
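// eniPrefixPoolReconcile reconciles the IPv4 prefixes of one ENI between the datastore and instance
// metadata, falling back to the EC2 API when metadata looks stale.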
func (c *IPAMContext) eniPrefixPoolReconcile(ipPool []string, attachedENI awsutils.ENIMetadata, eni string) {
attachedENIIPs := attachedENI.IPv4Prefixes
needEC2Reconcile := true
// Here we can't trust attachedENI since the IMDS metadata can be stale. We need to check with EC2 API.
log.Debugf("Found prefix pool count %d for eni %s\n", len(ipPool), eni)
if len(ipPool) != len(attachedENIIPs) {
log.Warnf("Instance metadata does not match data store! ipPool: %v, metadata: %v", ipPool, attachedENIIPs)
log.Debugf("We need to check the ENI status by calling the EC2 control plane.")
// Call EC2 to verify IPs on this ENI
ec2Addresses, err := c.awsClient.GetIPv4PrefixesFromEC2(eni)
if err != nil {
log.Errorf("Failed to fetch ENI IP addresses! Aborting reconcile of ENI %s", eni)
return
}
attachedENIIPs = ec2Addresses
needEC2Reconcile = false
}
// Add all known attached IPs to the datastore
seenIPs := c.verifyAndAddPrefixesToDatastore(eni, attachedENIIPs, needEC2Reconcile)
// Sweep phase, delete remaining Prefixes since they should not remain in the datastore
for _, existingIP := range ipPool {
if seenIPs[existingIP] {
continue
}
log.Debugf("Reconcile and delete Prefix %s on ENI %s", existingIP, eni)
// Force the delete, since we have verified with EC2 that these secondary IPs are no longer assigned to this ENI
_, ipv4Cidr, err := net.ParseCIDR(existingIP)
if err != nil {
log.Debugf("Failed to parse so continuing with next prefix")
continue
}
err = c.dataStore.DelIPv4CidrFromStore(eni, *ipv4Cidr, true /* force */)
if err != nil {
log.Errorf("Failed to reconcile and delete IP %s on ENI %s, %v", existingIP, eni, err)
ipamdErrInc("ipReconcileDel")
// continue instead of bailout due to one ip
continue
}
reconcileCnt.With(prometheus.Labels{"fn": "eniIPPoolReconcileDel"}).Inc()
}
}
// verifyAndAddIPsToDatastore updates the datastore with the known secondary IPs. IPs that are out of cooldown get added
// back to the datastore after being verified against EC2.
func (c *IPAMContext) verifyAndAddIPsToDatastore(eni string, attachedENIIPs []*ec2.NetworkInterfacePrivateIpAddress, needEC2Reconcile bool) map[string]bool {
var ec2VerifiedAddresses []*ec2.NetworkInterfacePrivateIpAddress
seenIPs := make(map[string]bool)
for _, privateIPv4 := range attachedENIIPs {
strPrivateIPv4 := aws.StringValue(privateIPv4.PrivateIpAddress)
if strPrivateIPv4 == c.primaryIP[eni] {
log.Infof("Reconcile and skip primary IP %s on ENI %s", strPrivateIPv4, eni)
continue
}
// Check if this IP was recently freed
ipv4Addr := net.IPNet{IP: net.ParseIP(strPrivateIPv4), Mask: net.IPv4Mask(255, 255, 255, 255)}
found, recentlyFreed := c.reconcileCooldownCache.RecentlyFreed(strPrivateIPv4)
if found {
if recentlyFreed {
log.Debugf("Reconcile skipping IP %s on ENI %s because it was recently unassigned from the ENI.", strPrivateIPv4, eni)
continue
} else {
if needEC2Reconcile {
// IMDS data might be stale
log.Debugf("This IP was recently freed, but is now out of cooldown. We need to verify with EC2 control plane.")
// Only call EC2 once for this ENI
if ec2VerifiedAddresses == nil {
var err error
// Call EC2 to verify IPs on this ENI
ec2VerifiedAddresses, err = c.awsClient.GetIPv4sFromEC2(eni)
if err != nil {
log.Errorf("Failed to fetch ENI IP addresses from EC2! %v", err)
// Do not delete this IP from the datastore or cooldown until we have confirmed with EC2
seenIPs[strPrivateIPv4] = true
continue
}
}
// Verify that the IP really belongs to this ENI
isReallyAttachedToENI := false
for _, ec2Addr := range ec2VerifiedAddresses {
if strPrivateIPv4 == aws.StringValue(ec2Addr.PrivateIpAddress) {
isReallyAttachedToENI = true
log.Debugf("Verified that IP %s is attached to ENI %s", strPrivateIPv4, eni)
break
}
}
if !isReallyAttachedToENI {
log.Warnf("Skipping IP %s on ENI %s because it does not belong to this ENI!", strPrivateIPv4, eni)
continue
}
}
// The IP can be removed from the cooldown cache
// TODO: Here we could check if the IP is still used by a pod stuck in Terminating state. (Issue #1091)
c.reconcileCooldownCache.Remove(strPrivateIPv4)
}
}
log.Infof("Trying to add %s", strPrivateIPv4)
// Try to add the IP
err := c.dataStore.AddIPv4CidrToStore(eni, ipv4Addr, false)
if err != nil && err.Error() != datastore.IPAlreadyInStoreError {
log.Errorf("Failed to reconcile IP %s on ENI %s", strPrivateIPv4, eni)
ipamdErrInc("ipReconcileAdd")
// Continue to check the other IPs instead of bailout due to one wrong IP
continue
}
// Mark action
seenIPs[strPrivateIPv4] = true
reconcileCnt.With(prometheus.Labels{"fn": "eniDataStorePoolReconcileAdd"}).Inc()
}
return seenIPs
}
// verifyAndAddPrefixesToDatastore updates the datastore with the known Prefixes. Prefixes that are out of cooldown get added
// back to the datastore after being verified against EC2.
func (c *IPAMContext) verifyAndAddPrefixesToDatastore(eni string, attachedENIPrefixes []*ec2.Ipv4PrefixSpecification, needEC2Reconcile bool) map[string]bool {
var ec2VerifiedAddresses []*ec2.Ipv4PrefixSpecification
seenIPs := make(map[string]bool)
for _, privateIPv4Cidr := range attachedENIPrefixes {
strPrivateIPv4Cidr := aws.StringValue(privateIPv4Cidr.Ipv4Prefix)
log.Debugf("Check in coolddown Found prefix %s", strPrivateIPv4Cidr)
// Check if this Prefix was recently freed
_, ipv4CidrPtr, err := net.ParseCIDR(strPrivateIPv4Cidr)
if err != nil {
log.Debugf("Failed to parse so continuing with next prefix")
continue
}
found, recentlyFreed := c.reconcileCooldownCache.RecentlyFreed(strPrivateIPv4Cidr)
if found {
if recentlyFreed {
log.Debugf("Reconcile skipping IP %s on ENI %s because it was recently unassigned from the ENI.", strPrivateIPv4Cidr, eni)
continue
} else {
if needEC2Reconcile {
// IMDS data might be stale
log.Debugf("This IP was recently freed, but is now out of cooldown. We need to verify with EC2 control plane.")
// Only call EC2 once for this ENI and post GA fix this logic for both prefixes
// and secondary IPs as per "split the loop" comment
if ec2VerifiedAddresses == nil {
var err error
// Call EC2 to verify Prefixes on this ENI
ec2VerifiedAddresses, err = c.awsClient.GetIPv4PrefixesFromEC2(eni)
if err != nil {
log.Errorf("Failed to fetch ENI IP addresses from EC2! %v", err)
// Do not delete this Prefix from the datastore or cooldown until we have confirmed with EC2
seenIPs[strPrivateIPv4Cidr] = true
continue
}
}
// Verify that the Prefix really belongs to this ENI
isReallyAttachedToENI := false
for _, ec2Addr := range ec2VerifiedAddresses {
if strPrivateIPv4Cidr == aws.StringValue(ec2Addr.Ipv4Prefix) {
isReallyAttachedToENI = true
log.Debugf("Verified that IP %s is attached to ENI %s", strPrivateIPv4Cidr, eni)
break
}
}
if !isReallyAttachedToENI {
log.Warnf("Skipping IP %s on ENI %s because it does not belong to this ENI!", strPrivateIPv4Cidr, eni)
continue
}
}
// The IP can be removed from the cooldown cache
// TODO: Here we could check if the Prefix is still used by a pod stuck in Terminating state. (Issue #1091)
c.reconcileCooldownCache.Remove(strPrivateIPv4Cidr)
}
}
err = c.dataStore.AddIPv4CidrToStore(eni, *ipv4CidrPtr, true)
if err != nil && err.Error() != datastore.IPAlreadyInStoreError {
log.Errorf("Failed to reconcile Prefix %s on ENI %s", strPrivateIPv4Cidr, eni)
ipamdErrInc("prefixReconcileAdd")
// Continue to check the other Prefixes instead of bailing out due to one wrong Prefix
continue
}
// Mark action
seenIPs[strPrivateIPv4Cidr] = true
reconcileCnt.With(prometheus.Labels{"fn": "eniDataStorePoolReconcileAdd"}).Inc()
}
return seenIPs
}
// UseCustomNetworkCfg returns whether Pods needs to use pod specific configuration or not.
func UseCustomNetworkCfg() bool {
if strValue := os.Getenv(envCustomNetworkCfg); strValue != "" {
parsedValue, err := strconv.ParseBool(strValue)
if err == nil {
return parsedValue
}
log.Warnf("Failed to parse %s; using default: false, err: %v", envCustomNetworkCfg, err)
}
return false
}
func dsBackingStorePath() string {
if value := os.Getenv(envBackingStorePath); value != "" {
return value
}
return defaultBackingStorePath
}
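// getWarmIPTarget reads WARM_IP_TARGET from the environment; a missing or negative value disables the target.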
func getWarmIPTarget() int {
inputStr, found := os.LookupEnv(envWarmIPTarget)
if !found {
return noWarmIPTarget
}
if input, err := strconv.Atoi(inputStr); err == nil {
if input >= 0 {
log.Debugf("Using WARM_IP_TARGET %v", input)
return input
}
}
return noWarmIPTarget
}
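// getMinimumIPTarget reads MINIMUM_IP_TARGET from the environment; a missing or negative value disables the target.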
func getMinimumIPTarget() int {
inputStr, found := os.LookupEnv(envMinimumIPTarget)
if !found {
return noMinimumIPTarget
}
if input, err := strconv.Atoi(inputStr); err == nil {
if input >= 0 {
log.Debugf("Using MINIMUM_IP_TARGET %v", input)
return input
}
}
return noMinimumIPTarget
}
func disablingENIProvisioning() bool |
func enablePodENI() bool {
return getEnvBoolWithDefault(envEnablePodENI, false)
}
func usePrefixDelegation() bool {
return getEnvBoolWithDefault(envEnableIpv4PrefixDelegation, false)
}
func isIPv4Enabled() bool {
return getEnvBoolWithDefault(envEnableIPv4, false)
}
func isIPv6Enabled() bool {
return getEnvBoolWithDefault(envEnableIPv6, false)
}
func enableManageUntaggedMode() bool {
return getEnvBoolWithDefault(envManageUntaggedENI, true)
}
func enablePodIPAnnotation() bool {
return getEnvBoolWithDefault(envAnnotatePodIP, false)
}
// filterUnmanagedENIs filters out ENIs marked with the "node.k8s.amazonaws.com/no_manage" tag
func (c *IPAMContext) filterUnmanagedENIs(enis []awsutils.ENIMetadata) []awsutils.ENIMetadata {
numFiltered := 0
ret := make([]awsutils.ENIMetadata, 0, len(enis))
for _, eni := range enis {
//Filter out any Unmanaged ENIs. VPC CNI will only work with Primary ENI in IPv6 Prefix Delegation mode until
//we open up IPv6 support in Secondary IP and Custom networking modes. Filtering out the ENIs here will
//help us avoid a myriad of if/else checks elsewhere in the code.
if c.enableIPv6 && !c.awsClient.IsPrimaryENI(eni.ENIID) {
log.Debugf("Skipping ENI %s: IPv6 Mode is enabled and VPC CNI will only manage Primary ENI in v6 PD mode",
eni.ENIID)
numFiltered++
continue
} else if c.awsClient.IsUnmanagedENI(eni.ENIID) {
log.Debugf("Skipping ENI %s: since it is unmanaged", eni.ENIID)
numFiltered++
continue
} else if c.awsClient.IsCNIUnmanagedENI(eni.ENIID) {
log.Debugf("Skipping ENI %s: since on non-zero network card", eni.ENIID)
numFiltered++
continue
}
ret = append(ret, eni)
}
c.unmanagedENI = numFiltered
c.updateIPStats(numFiltered)
return ret
}
// datastoreTargetState determines the number of IPs `short` or `over` our WARM_IP_TARGET,
// accounting for the MINIMUM_IP_TARGET
// With prefix delegation this function determines the number of Prefixes `short` or `over`
func (c *IPAMContext) datastoreTargetState() (short int, over int, enabled bool) {
if c.warmIPTarget == noWarmIPTarget && c.minimumIPTarget == noMinimumIPTarget {
// there is no WARM_IP_TARGET defined and no MINIMUM_IP_TARGET, fallback to use all IP addresses on ENI
return 0, 0, false
}
total, assigned, totalPrefix, cooldownIPs := c.dataStore.GetStats(ipV4AddrFamily)
available := total - assigned
// short is greater than 0 when we have fewer available IPs than the warm IP target
short = max(c.warmIPTarget-available, 0)
// short is greater than the warm IP target alone when we have fewer total IPs than the minimum target
short = max(short, c.minimumIPTarget-total)
// over is the number of available IPs we have beyond the warm IP target
over = max(available-c.warmIPTarget, 0)
// over is less than the warm IP target alone if it would imply reducing total IPs below the minimum target
over = max(min(over, total-c.minimumIPTarget), 0)
if c.enablePrefixDelegation {
//short : number of IPs short to reach warm targets
//over : number of IPs over the warm targets
_, numIPsPerPrefix, _ := datastore.GetPrefixDelegationDefaults()
// Number of prefixes IPAMD is short of to achieve warm targets
shortPrefix := datastore.DivCeil(short, numIPsPerPrefix)
// Over will have number of IPs more than needed but with PD we would have allocated in chunks of /28
// Say assigned = 1, warm ip target = 16, this will need 2 prefixes. But over will return 15.
// Hence we need to check if 'over' number of IPs are needed to maintain the warm targets
prefixNeededForWarmIP := datastore.DivCeil(assigned+c.warmIPTarget, numIPsPerPrefix)
prefixNeededForMinIP := datastore.DivCeil(c.minimumIPTarget, numIPsPerPrefix)
// over will be the number of prefixes more than needed, but these could be spread across used prefixes;
// say, after a couple of pod churns, 3 prefixes are allocated with 1 IP each assigned and the warm IP target is 15
// (J : is this needed? since we have to walk thru the loop of prefixes)
freePrefixes := c.dataStore.GetFreePrefixes()
overPrefix := max(min(freePrefixes, totalPrefix-prefixNeededForWarmIP), 0)
overPrefix = max(min(overPrefix, totalPrefix-prefixNeededForMinIP), 0)
log.Debugf("Current warm IP stats : target: %d, total: %d, assigned: %d, available: %d, short(prefixes): %d, over(prefixes): %d", c.warmIPTarget, total, assigned, available, shortPrefix, overPrefix)
return shortPrefix, overPrefix, true
}
log.Debugf("Current warm IP stats: target: %d, total: %d, assigned: %d, available: %d, cooldown: %d, short: %d, over %d", c.warmIPTarget, total, assigned, available, cooldownIPs, short, over)
return short, over, true
}
// datastorePrefixTargetState determines the number of prefixes short to reach WARM_PREFIX_TARGET
func (c *IPAMContext) datastorePrefixTargetState() (short int, enabled bool) {
if !c.warmPrefixTargetDefined() {
return 0, false
}
// /28 will consume 16 IPs so let's not allocate if not needed.
freePrefixesInStore := c.dataStore.GetFreePrefixes()
toAllocate := max(c.warmPrefixTarget-freePrefixesInStore, 0)
log.Debugf("Prefix target is %d, short of %d prefixes, free %d prefixes", c.warmPrefixTarget, toAllocate, freePrefixesInStore)
return toAllocate, true
}
// setTerminating atomically sets the terminating flag.
func (c *IPAMContext) setTerminating() {
atomic.StoreInt32(&c.terminating, 1)
}
func (c *IPAMContext) isTerminating() bool {
return atomic.LoadInt32(&c.terminating) > 0
}
// GetConfigForDebug returns the active values of the configuration env vars (for debugging purposes).
func GetConfigForDebug() map[string]interface{} {
return map[string]interface{}{
envWarmIPTarget: getWarmIPTarget(),
envWarmENITarget: getWarmENITarget(),
envCustomNetworkCfg: UseCustomNetworkCfg(),
}
}
func max(x, y int) int {
if x < y {
return y
}
return x
}
func min(x, y int) int {
if y < x {
return y
}
return x
}
func (c *IPAMContext) getTrunkLinkIndex() (int, error) {
trunkENI := c.dataStore.GetTrunkENI()
attachedENIs, err := c.awsClient.GetAttachedENIs()
if err != nil {
return -1, err
}
for _, eni := range attachedENIs {
if eni.ENIID == trunkENI {
retryLinkByMacInterval := 100 * time.Millisecond
link, err := c.networkClient.GetLinkByMac(eni.MAC, retryLinkByMacInterval)
if err != nil {
return -1, err
}
return link.Attrs().Index, nil
}
}
return -1, errors.New("no trunk found")
}
// SetNodeLabel sets or deletes a node label
func (c *IPAMContext) SetNodeLabel(ctx context.Context, key, value string) error {
var node corev1.Node
// Find my node
err := c.cachedK8SClient.Get(ctx, types.NamespacedName{Name: c.myNodeName}, &node)
log.Debugf("Node found %q - labels - %q", node.Name, len(node.Labels))
if err != nil {
log.Errorf("Failed to get node: %v", err)
return err
}
if labelValue, ok := node.Labels[key]; ok && labelValue == value {
log.Debugf("Node label %q is already %q", key, labelValue)
return nil
}
// Make deep copy for modification
updateNode := node.DeepCopy()
// Set node label
if value != "" {
updateNode.Labels[key] = value
} else {
// Empty value, delete the label
log.Debugf("Deleting label %q", key)
delete(updateNode.Labels, key)
}
// Update node status to advertise the resource.
err = c.cachedK8SClient.Update(ctx, updateNode)
if err != nil {
log.Errorf("Failed to update node %s with label %q: %q, error: %v", c.myNodeName, key, value, err)
}
log.Debugf("Updated node %s with label %q: %q", c.myNodeName, key, value)
return nil
}
// GetPod returns the pod matching the name and namespace
func (c *IPAMContext) GetPod(podName, namespace string) (*corev1.Pod, error) {
ctx := context.TODO()
var pod corev1.Pod
podKey := types.NamespacedName{
Namespace: namespace,
Name: podName,
}
err := c.rawK8SClient.Get(ctx, podKey, &pod)
if err != nil {
return nil, fmt.Errorf("Error while trying to retrieve Pod Info: %s", err)
}
return &pod, nil
}
// AnnotatePod annotates the pod with the provided key and value
func (c *IPAMContext) AnnotatePod(podNamespace, podName, key, val string) error {
ctx := context.TODO()
var pod *corev1.Pod
var err error
err = retry.RetryOnConflict(retry.DefaultBackoff, func() error {
if pod, err = c.GetPod(podName, podNamespace); err != nil {
return err
}
newPod := pod.DeepCopy()
newPod.Annotations[key] = val
if err = c.rawK8SClient.Patch(ctx, newPod, client.MergeFrom(pod)); err != nil {
log.Errorf("Failed to annotate %s the pod with %s, error %v", key, val, err)
return err
}
log.Debugf("Annotates pod %s with %s: %s", podName, key, val)
return nil
})
return err
}
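// tryUnassignIPsFromENIs frees unassigned secondary IPs from every ENI in the datastore.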
func (c *IPAMContext) tryUnassignIPsFromENIs() {
log.Debugf("In tryUnassignIPsFromENIs")
eniInfos := c.dataStore.GetENIInfos()
for eniID := range eniInfos.ENIs {
c.tryUnassignIPFromENI(eniID)
}
}
func (c *IPAMContext) tryUnassignIPFromENI(eniID string) {
freeableIPs := c.dataStore.FreeableIPs(eniID)
if len(freeableIPs) == 0 {
log.Debugf("No freeable IPs")
return
}
// Delete IPs from datastore
var deletedIPs []string
for _, toDelete := range freeableIPs {
// Don't force the delete, since a freeable IP might have been assigned to a pod
// before we get around to deleting it.
err := c.dataStore.DelIPv4CidrFromStore(eniID, toDelete, false /* force */)
if err != nil {
log.Warnf("Failed to delete IP %s on ENI %s from datastore: %s", toDelete, eniID, err)
ipamdErrInc("decreaseIPPool")
continue
} else {
deletedIPs = append(deletedIPs, toDelete.IP.String())
}
}
// Deallocate IPs from the instance if they aren't used by pods.
if err := c.awsClient.DeallocIPAddresses(eniID, deletedIPs); err != nil {
log.Warnf("Failed to decrease IP pool by removing IPs %v from ENI %s: %s", deletedIPs, eniID, err)
} else {
log.Debugf("Successfully decreased IP pool by removing IPs %v from ENI %s", deletedIPs, eniID)
}
}
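// tryUnassignPrefixesFromENIs frees unassigned prefixes from every ENI in the datastore.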
func (c *IPAMContext) tryUnassignPrefixesFromENIs() {
eniInfos := c.dataStore.GetENIInfos()
for eniID := range eniInfos.ENIs {
c.tryUnassignPrefixFromENI(eniID)
}
}
func (c *IPAMContext) tryUnassignPrefixFromENI(eniID string) {
freeablePrefixes := c.dataStore.FreeablePrefixes(eniID)
if len(freeablePrefixes) == 0 {
return
}
// Delete Prefixes from datastore
var deletedPrefixes []string
for _, toDelete := range freeablePrefixes {
// Don't force the delete, since a freeable Prefix might have been assigned to a pod
// before we get around to deleting it.
err := c.dataStore.DelIPv4CidrFromStore(eniID, toDelete, false /* force */)
if err != nil {
log.Warnf("Failed to delete Prefix %s on ENI %s from datastore: %s", toDelete, eniID, err)
ipamdErrInc("decreaseIPPool")
return
} else {
deletedPrefixes = append(deletedPrefixes, toDelete.String())
}
}
// Deallocate IPs from the instance if they aren't used by pods.
if err := c.awsClient.DeallocPrefixAddresses(eniID, deletedPrefixes); err != nil {
log.Warnf("Failed to delete prefix %v from ENI %s: %s", deletedPrefixes, eniID, err)
} else {
log.Debugf("Successfully prefix removing IPs %v from ENI %s", deletedPrefixes, eniID)
}
}
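// GetENIResourcesToAllocate returns how many IPs (or prefixes, with PD enabled) to allocate on a new ENI.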
func (c *IPAMContext) GetENIResourcesToAllocate() int {
var resourcesToAllocate int
if !c.enablePrefixDelegation {
resourcesToAllocate = c.maxIPsPerENI
short, _, warmTargetDefined := c.datastoreTargetState()
if warmTargetDefined {
resourcesToAllocate = short
}
} else {
resourcesToAllocate = c.getPrefixesNeeded()
}
return resourcesToAllocate
}
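// GetIPv4Limit returns the maximum IPs per ENI and the maximum prefixes per ENI (0 when PD is disabled).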
func (c *IPAMContext) GetIPv4Limit() (int, int, error) {
var maxIPsPerENI, maxPrefixesPerENI, maxIpsPerPrefix int
if !c.enablePrefixDelegation {
maxIPsPerENI = c.awsClient.GetENIIPv4Limit()
maxPrefixesPerENI = 0
} else {
//Single PD - allocate one prefix per ENI and new add will be new ENI + prefix
//Multi - allocate one prefix per ENI and new add will be new prefix or new ENI + prefix
_, maxIpsPerPrefix, _ = datastore.GetPrefixDelegationDefaults()
maxPrefixesPerENI = c.awsClient.GetENIIPv4Limit()
maxIPsPerENI = maxPrefixesPerENI * maxIpsPerPrefix
log.Debugf("max prefix %d max ips %d", maxPrefixesPerENI, maxIPsPerENI)
}
return maxIPsPerENI, maxPrefixesPerENI, nil
}
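// isDatastorePoolTooLow returns true when the datastore has fewer available addresses than the warm targets require.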
func (c *IPAMContext) isDatastorePoolTooLow() bool {
short, _, warmTargetDefined := c.datastoreTargetState()
if warmTargetDefined {
return short > 0
}
total, used, _, cooldownIPs := c.dataStore.GetStats(ipV4AddrFamily)
available := total - used
warmTarget := c.warmENITarget
totalIPs := c.maxIPsPerENI
if c.enablePrefixDelegation {
warmTarget = c.warmPrefixTarget
_, maxIpsPerPrefix, _ := datastore.GetPrefixDelegationDefaults()
totalIPs = maxIpsPerPrefix
}
poolTooLow := available < totalIPs*warmTarget || (warmTarget == 0 && available == 0)
if poolTooLow {
logPoolStats(total, used, cooldownIPs, c.maxIPsPerENI, c.enablePrefixDelegation)
log.Debugf("IP pool is too low: available (%d) < ENI target (%d) * addrsPerENI (%d)", available, warmTarget, totalIPs)
}
return poolTooLow
}
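// isDatastorePoolTooHigh returns true when the datastore holds more addresses or prefixes than the warm targets allow.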
func (c *IPAMContext) isDatastorePoolTooHigh() bool {
_, over, warmTargetDefined := c.datastoreTargetState()
if warmTargetDefined {
return over > 0
}
//For the existing ENIs check if we can cleanup prefixes
if c.warmPrefixTargetDefined() {
freePrefixes := c.dataStore.GetFreePrefixes()
poolTooHigh := freePrefixes > c.warmPrefixTarget
if poolTooHigh {
log.Debugf("Prefix pool is high so might be able to deallocate : free prefixes %d and warm prefix target %d", freePrefixes, c.warmPrefixTarget)
}
return poolTooHigh
}
// We only ever report the pool being too high if WARM_IP_TARGET or WARM_PREFIX_TARGET is set
return false
}
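// warmPrefixTargetDefined reports whether a WARM_PREFIX_TARGET is in effect (prefix delegation enabled and the target at or above the default).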
func (c *IPAMContext) warmPrefixTargetDefined() bool {
return c.warmPrefixTarget >= defaultWarmPrefixTarget && c.enablePrefixDelegation
}
//DeallocCidrs frees IPs and Prefixes from EC2
func (c *IPAMContext) DeallocCidrs(eniID string, deletableCidrs []datastore.CidrInfo) {
var deletableIPs []string
var deletablePrefixes []string
for _, toDeleteCidr := range deletableCidrs {
if toDeleteCidr.IsPrefix {
strDeletablePrefix := toDeleteCidr.Cidr.String()
deletablePrefixes = append(deletablePrefixes, strDeletablePrefix)
// Track the last time we unassigned Cidrs from an ENI. We won't reconcile any Cidrs in this cache
// for at least ipReconcileCooldown
c.reconcileCooldownCache.Add(strDeletablePrefix)
} else {
strDeletableIP := toDeleteCidr.Cidr.IP.String()
deletableIPs = append(deletableIPs, strDeletableIP)
// Track the last time we unassigned IPs from an ENI. We won't reconcile any IPs in this cache
// for at least ipReconcileCooldown
c.reconcileCooldownCache.Add(strDeletableIP)
}
}
if err := c.awsClient.DeallocPrefixAddresses(eniID, deletablePrefixes); err != nil {
log.Warnf("Failed to free Prefixes %v from ENI %s: %s", deletablePrefixes, eniID, err)
}
if err := c.awsClient.DeallocIPAddresses(eniID, deletableIPs); err != nil {
log.Warnf("Failed to free IPs %v from ENI %s: %s", deletableIPs, eniID, err)
}
}
// getPrefixesNeeded returns the number of prefixes that need to be allocated to the ENI
func (c *IPAMContext) getPrefixesNeeded() int {
//By default allocate 1 prefix at a time
toAllocate := 1
//TODO - post GA we can evaluate to see if these two calls can be merged.
//datastoreTargetState already has complex math so adding Prefix target will make it
//even more complex.
short, _, warmIPTargetDefined := c.datastoreTargetState()
shortPrefixes, warmPrefixTargetDefined := c.datastorePrefixTargetState()
//WARM_IP_TARGET takes precedence over WARM_PREFIX_TARGET
if warmIPTargetDefined {
toAllocate = max(toAllocate, short)
} else if warmPrefixTargetDefined {
toAllocate = max(toAllocate, shortPrefixes)
}
log.Debugf("ToAllocate: %d", toAllocate)
return toAllocate
}
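// initENIAndIPLimits initializes the node's ENI, IP, and prefix limits when IPv4 is enabled; the limits are no-ops in IPv6 PD mode.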
func (c *IPAMContext) initENIAndIPLimits() (err error) {
if c.enableIPv4 {
nodeMaxENI, err := c.getMaxENI()
if err != nil {
log.Error("Failed to get ENI limit")
return err
}
c.maxENI = nodeMaxENI
c.maxIPsPerENI, c.maxPrefixesPerENI, err = c.GetIPv4Limit()
if err != nil {
return err
}
log.Debugf("Max ip per ENI %d and max prefixes per ENI %d", c.maxIPsPerENI, c.maxPrefixesPerENI)
}
//WARM and MAX ENI & IP/Prefix counts are no-op in IPv6 Prefix delegation mode. Once we start supporting IPv6 in
//Secondary IP mode, these variables will play the same role as they currently do in IPv4 mode. So, for now we
//leave them at their default values.
return nil
}
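// isConfigValid validates that the configured IP family, prefix delegation, and related settings are mutually compatible on this instance type.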
func (c *IPAMContext) isConfigValid() bool {
//Get Instance type
hypervisorType := c.awsClient.GetInstanceHypervisorFamily()
//Validate that only one among v4 and v6 is enabled.
if c.enableIPv4 && c.enableIPv6 {
log.Errorf("IPv4 and IPv6 are both enabled. VPC CNI currently doesn't support dual stack mode")
return false
} else if !c.enableIPv4 && !c.enableIPv6 {
log.Errorf("IPv4 and IPv6 are both disabled. One of them have to be enabled")
return false
}
//Validate PD mode is enabled if VPC CNI is operating in IPv6 mode. SGPP and Custom networking are not supported in IPv6 mode.
if c.enableIPv6 && (c.enablePodENI || c.useCustomNetworking || !c.enablePrefixDelegation) {
log.Errorf("IPv6 is supported only in Prefix Delegation mode. Security Group Per Pod and " +
"Custom Networking are not supported in IPv6 mode. Please set the env variables accordingly.")
return false
}
//Validate Prefix Delegation against v4 and v6 modes.
if hypervisorType != "nitro" && c.enablePrefixDelegation {
if c.enableIPv6 {
log.Errorf("Prefix Delegation is not supported on non-nitro instance %s. IPv6 is only supported in Prefix delegation Mode. ", c.awsClient.GetInstanceType())
return false
}
log.Warnf("Prefix delegation is not supported on non-nitro instance %s hence falling back to default (secondary IP) mode", c.awsClient.GetInstanceType())
c.enablePrefixDelegation = false
}
return true
}
| {
return getEnvBoolWithDefault(envDisableENIProvisioning, false)
} |
replayService.js | import ReplayProvider from "../providers/replayProvider";
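/** Thin service layer that delegates replay queries and uploads to ReplayProvider. */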
export default class | {
constructor() {
this.provider = new ReplayProvider();
}
async playerExists(player) {
return await this.provider.playerExists(player);
}
async getPlayer(filter) {
return await this.provider.getPlayer(filter);
}
upload(formData) {
return this.provider.upload(formData);
}
}
| ReplayService |
utils.py | import secrets
import time
class Utils:
_instance = None
def __init__(self) -> None:
pass
@staticmethod
def | ():
"""
Generates a random identifier composed of the current timestamp and a hexadecimal token.
Returns:
`str`: string with hexadecimal values
"""
token = secrets.token_hex()
now = time.time()
random_id = f"{now}{token}"
return random_id
@staticmethod
def getInstance(re_init=False):
if Utils._instance is None or re_init:
Utils._instance = Utils()
return Utils._instance
| generateRandomId |
close_code.rs | use serde_repr::{Deserialize_repr, Serialize_repr};
use std::convert::TryFrom;
use std::{
error::Error,
fmt::{Display, Formatter, Result as FmtResult},
};
/// Gateway close event codes.
#[derive(
Clone, Copy, Debug, Deserialize_repr, Eq, Hash, PartialEq, PartialOrd, Ord, Serialize_repr,
)]
#[non_exhaustive]
#[repr(u16)]
pub enum CloseCode {
/// An unknown error occurred.
UnknownError = 4000,
/// An invalid opcode or payload for an opcode was sent.
UnknownOpcode = 4001,
/// An invalid payload was sent.
DecodeError = 4002,
/// A payload was sent prior to identifying.
NotAuthenticated = 4003,
/// An invalid token was sent when identifying.
AuthenticationFailed = 4004,
/// Multiple identify payloads were sent.
AlreadyAuthenticated = 4005,
/// An invalid sequence was sent for resuming.
InvalidSequence = 4007,
/// Too many payloads were sent in a certain amount of time.
RateLimited = 4008,
/// The session timed out.
SessionTimedOut = 4009,
/// An invalid shard was sent when identifying.
InvalidShard = 4010,
/// Sharding is required because there are too many guilds.
ShardingRequired = 4011,
/// An invalid version for the gateway was sent.
InvalidApiVersion = 4012,
/// An invalid intent was sent.
InvalidIntents = 4013,
/// A disallowed intent was sent, may need allowlisting.
DisallowedIntents = 4014,
}
#[derive(Debug, PartialEq)]
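/// The error returned when a `u16` does not map to a known gateway close code.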
pub struct CloseCodeConversionError {
code: u16,
}
impl CloseCodeConversionError {
fn | (code: u16) -> Self {
Self { code }
}
pub fn code(&self) -> u16 {
self.code
}
}
impl Display for CloseCodeConversionError {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
f.write_fmt(format_args!("{} isn't a valid close code", self.code))
}
}
impl Error for CloseCodeConversionError {}
impl TryFrom<u16> for CloseCode {
type Error = CloseCodeConversionError;
fn try_from(value: u16) -> Result<Self, Self::Error> {
let close_code = match value {
4000 => CloseCode::UnknownError,
4001 => CloseCode::UnknownOpcode,
4002 => CloseCode::DecodeError,
4003 => CloseCode::NotAuthenticated,
4004 => CloseCode::AuthenticationFailed,
4005 => CloseCode::AlreadyAuthenticated,
4007 => CloseCode::InvalidSequence,
4008 => CloseCode::RateLimited,
4009 => CloseCode::SessionTimedOut,
4010 => CloseCode::InvalidShard,
4011 => CloseCode::ShardingRequired,
4012 => CloseCode::InvalidApiVersion,
4013 => CloseCode::InvalidIntents,
4014 => CloseCode::DisallowedIntents,
_ => return Err(CloseCodeConversionError::new(value)),
};
Ok(close_code)
}
}
#[cfg(test)]
mod tests {
use super::CloseCode;
use serde_test::Token;
use std::convert::TryFrom;
#[test]
fn test_variants() {
serde_test::assert_tokens(&CloseCode::UnknownError, &[Token::U16(4000)]);
serde_test::assert_tokens(&CloseCode::UnknownOpcode, &[Token::U16(4001)]);
serde_test::assert_tokens(&CloseCode::DecodeError, &[Token::U16(4002)]);
serde_test::assert_tokens(&CloseCode::NotAuthenticated, &[Token::U16(4003)]);
serde_test::assert_tokens(&CloseCode::AuthenticationFailed, &[Token::U16(4004)]);
serde_test::assert_tokens(&CloseCode::AlreadyAuthenticated, &[Token::U16(4005)]);
serde_test::assert_tokens(&CloseCode::InvalidSequence, &[Token::U16(4007)]);
serde_test::assert_tokens(&CloseCode::RateLimited, &[Token::U16(4008)]);
serde_test::assert_tokens(&CloseCode::SessionTimedOut, &[Token::U16(4009)]);
serde_test::assert_tokens(&CloseCode::InvalidShard, &[Token::U16(4010)]);
serde_test::assert_tokens(&CloseCode::ShardingRequired, &[Token::U16(4011)]);
serde_test::assert_tokens(&CloseCode::InvalidApiVersion, &[Token::U16(4012)]);
serde_test::assert_tokens(&CloseCode::InvalidIntents, &[Token::U16(4013)]);
serde_test::assert_tokens(&CloseCode::DisallowedIntents, &[Token::U16(4014)]);
}
#[test]
fn test_conversion() {
assert_eq!(CloseCode::try_from(4000).unwrap(), CloseCode::UnknownError);
assert_eq!(CloseCode::try_from(4001).unwrap(), CloseCode::UnknownOpcode);
assert_eq!(CloseCode::try_from(4002).unwrap(), CloseCode::DecodeError);
assert_eq!(
CloseCode::try_from(4003).unwrap(),
CloseCode::NotAuthenticated
);
assert_eq!(
CloseCode::try_from(4004).unwrap(),
CloseCode::AuthenticationFailed
);
assert_eq!(
CloseCode::try_from(4005).unwrap(),
CloseCode::AlreadyAuthenticated
);
assert_eq!(
CloseCode::try_from(4007).unwrap(),
CloseCode::InvalidSequence
);
assert_eq!(CloseCode::try_from(4008).unwrap(), CloseCode::RateLimited);
assert_eq!(
CloseCode::try_from(4009).unwrap(),
CloseCode::SessionTimedOut
);
assert_eq!(CloseCode::try_from(4010).unwrap(), CloseCode::InvalidShard);
assert_eq!(
CloseCode::try_from(4011).unwrap(),
CloseCode::ShardingRequired
);
assert_eq!(
CloseCode::try_from(4012).unwrap(),
CloseCode::InvalidApiVersion
);
assert_eq!(
CloseCode::try_from(4013).unwrap(),
CloseCode::InvalidIntents
);
assert_eq!(
CloseCode::try_from(4014).unwrap(),
CloseCode::DisallowedIntents
);
assert!(CloseCode::try_from(5000).is_err());
}
}
| new |
debug.rs | use crate::types::RequiredInterface;
use crate::types::TypeName;
use proc_macro2::TokenStream;
use quote::quote;
pub fn debug_tokens(type_name: &TypeName, interfaces: &Vec<RequiredInterface>) -> TokenStream {
let name = &type_name.name;
let default_impl = quote! { ::std::format!("{}({:?})", #name, <Self as ::winrt::AbiTransferable>::get_abi(self)) };
| let implements_istringable = interfaces.iter().any(|interface| {
interface.name.name == "IStringable" && interface.name.namespace == "Windows.Foundation"
});
let is_istringable =
type_name.name == "IStringable" && type_name.namespace == "Windows.Foundation";
let implementation = if implements_istringable && !is_istringable {
let istringable_namespace = crate::types::namespace::to_namespace_tokens(
"Windows.Foundation",
&type_name.namespace,
);
quote! {
"{:?}",
{
let s: #istringable_namespace IStringable = self.into();
s
}
}
} else if is_istringable {
quote! {
"{}",
match self.to_string() {
Ok(s) => s.to_string(),
Err(_) => #default_impl
}
}
} else {
quote! {
"{}", #default_impl
}
};
to_tokens(type_name, &implementation)
}
pub fn default_debug_tokens(type_name: &TypeName) -> TokenStream {
let name = &type_name.name;
let implementation =
quote! { "{}({:?})", #name, <Self as ::winrt::AbiTransferable>::get_abi(self) };
to_tokens(type_name, &implementation)
}
fn to_tokens(type_name: &TypeName, implementation: &TokenStream) -> TokenStream {
let constraints = &*type_name.constraints();
let name = &*type_name.to_tokens(&type_name.namespace);
quote! {
impl<#constraints> ::std::fmt::Debug for #name {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
write!(
f,
#implementation
)
}
}
}
} | |
main.rs | // SPDX-License-Identifier: MIT OR Apache-2.0
//
// Copyright (c) 2018-2021 Andre Richter <[email protected]>
// Rust embedded logo for `make doc`.
#![doc(html_logo_url = "https://git.io/JeGIp")]
//! The `kernel` binary.
//!
//! # Code organization and architecture
//!
//! The code is divided into different *modules*, each representing a typical **subsystem** of the
//! `kernel`. Top-level module files of subsystems reside directly in the `src` folder. For example,
//! `src/memory.rs` contains code that is concerned with all things memory management.
//!
//! ## Visibility of processor architecture code
//!
//! Some of the `kernel`'s subsystems depend on low-level code that is specific to the target
//! processor architecture. For each supported processor architecture, there exists a subfolder in
//! `src/_arch`, for example, `src/_arch/aarch64`.
//!
//! The architecture folders mirror the subsystem modules laid out in `src`. For example,
//! architectural code that belongs to the `kernel`'s MMU subsystem (`src/memory/mmu.rs`) would go
//! into `src/_arch/aarch64/memory/mmu.rs`. The latter file is loaded as a module in
//! `src/memory/mmu.rs` using the `path attribute`. Usually, the chosen module name is the generic
//! module's name prefixed with `arch_`.
//!
//! For example, this is the top of `src/memory/mmu.rs`:
//!
//! ```
//! #[cfg(target_arch = "aarch64")]
//! #[path = "../_arch/aarch64/memory/mmu.rs"]
//! mod arch_mmu;
//! ```
//!
//! Oftentimes, items from the `arch_` module will be publicly reexported by the parent module.
//! This way, each architecture-specific module can provide its implementation of an item, while
//! the caller need not be concerned with which architecture has been conditionally compiled.
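//!
//! As a minimal sketch of such a reexport (using the `mmu()` accessor that generic code
//! reaches as `memory::mmu::mmu()`), the top of `src/memory/mmu.rs` could continue with:
//!
//! ```
//! pub use arch_mmu::mmu;
//! ```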
//!
//! ## BSP code
//!
//! `BSP` stands for Board Support Package. `BSP` code is organized under `src/bsp.rs` and contains
//! target board specific definitions and functions. These are things such as the board's memory map
//! or instances of drivers for devices that are featured on the respective board.
//!
//! Just like processor architecture code, the `BSP` code's module structure tries to mirror the
//! `kernel`'s subsystem modules, but there is no reexporting this time. That means whatever is
//! provided must be called starting from the `bsp` namespace, e.g. `bsp::driver::driver_manager()`.
//!
//! ## Kernel interfaces
//!
//! Both `arch` and `bsp` contain code that is conditionally compiled depending on the actual target
//! and board for which the kernel is compiled. For example, the `interrupt controller` hardware of
//! the `Raspberry Pi 3` and the `Raspberry Pi 4` is different, but we want the rest of the `kernel`
//! code to play nicely with either of the two without much hassle.
//!
//! In order to provide a clean abstraction between `arch`, `bsp` and `generic kernel code`, | //! interface to the rest of the `kernel`.
//!
//! ```
//! +-------------------+
//! | Interface (Trait) |
//! | |
//! +--+-------------+--+
//! ^ ^
//! | |
//! | |
//! +----------+--+ +--+----------+
//! | kernel code | | bsp code |
//! | | | arch code |
//! +-------------+ +-------------+
//! ```
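//!
//! As a minimal sketch of this pattern (simplified; the real trait may carry more methods),
//! the timer interface that `kernel_main()` uses via `time::interface::TimeManager` could be
//! defined in the generic `time` subsystem like this:
//!
//! ```
//! pub mod interface {
//!     use core::time::Duration;
//!
//!     /// Implemented by `arch` code; generic kernel code programs against this trait only.
//!     pub trait TimeManager {
//!         /// The timer's resolution.
//!         fn resolution(&self) -> Duration;
//!
//!         /// Spin for the given duration.
//!         fn spin_for(&self, duration: Duration);
//!     }
//! }
//! ```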
//!
//! # Summary
//!
//! For a logical `kernel` subsystem, corresponding code can be distributed over several physical
//! locations. Here is an example for the **memory** subsystem:
//!
//! - `src/memory.rs` and `src/memory/**/*`
//! - Common code that is agnostic of target processor architecture and `BSP` characteristics.
//! - Example: A function to zero a chunk of memory.
//! - Interfaces for the memory subsystem that are implemented by `arch` or `BSP` code.
//! - Example: An `MMU` interface that defines `MMU` function prototypes.
//! - `src/bsp/__board_name__/memory.rs` and `src/bsp/__board_name__/memory/**/*`
//! - `BSP` specific code.
//! - Example: The board's memory map (physical addresses of DRAM and MMIO devices).
//! - `src/_arch/__arch_name__/memory.rs` and `src/_arch/__arch_name__/memory/**/*`
//! - Processor architecture specific code.
//! - Example: Implementation of the `MMU` interface for the `__arch_name__` processor
//! architecture.
//!
//! From a namespace perspective, **memory** subsystem code lives in:
//!
//! - `crate::memory::*`
//! - `crate::bsp::memory::*`
//!
//! # Boot flow
//!
//! 1. The kernel's entry point is the function `cpu::boot::arch_boot::_start()`.
//! - It is implemented in `src/_arch/__arch_name__/cpu/boot.s`.
//! 2. Once finished with architectural setup, the arch code calls `kernel_init()`.
#![allow(clippy::upper_case_acronyms)]
#![allow(incomplete_features)]
#![feature(const_fn_fn_ptr_basics)]
#![feature(core_intrinsics)]
#![feature(format_args_nl)]
#![feature(global_asm)]
#![feature(panic_info_message)]
#![feature(trait_alias)]
#![no_main]
#![no_std]
mod bsp;
mod console;
mod cpu;
mod driver;
mod exception;
mod memory;
mod panic_wait;
mod print;
mod synchronization;
mod time;
/// Early init code.
///
/// # Safety
///
/// - Only a single core must be active and running this function.
/// - The init calls in this function must appear in the correct order:
/// - MMU + Data caching must be activated at the earliest. Without it, any atomic operations,
/// e.g. the yet-to-be-introduced spinlocks in the device drivers (which currently employ
/// NullLocks instead of spinlocks), will fail to work (properly) on the RPi SoCs.
unsafe fn kernel_init() -> ! {
use driver::interface::DriverManager;
use memory::mmu::interface::MMU;
if let Err(string) = memory::mmu::mmu().enable_mmu_and_caching() {
panic!("MMU: {}", string);
}
for i in bsp::driver::driver_manager().all_device_drivers().iter() {
if let Err(x) = i.init() {
panic!("Error loading driver: {}: {}", i.compatible(), x);
}
}
bsp::driver::driver_manager().post_device_driver_init();
// println! is usable from here on.
// Transition from unsafe to safe.
kernel_main()
}
/// The main function running after the early init.
fn kernel_main() -> ! {
use bsp::console::console;
use console::interface::All;
use core::time::Duration;
use driver::interface::DriverManager;
use time::interface::TimeManager;
info!(
"{} version {}",
env!("CARGO_PKG_NAME"),
env!("CARGO_PKG_VERSION")
);
info!("Booting on: {}", bsp::board_name());
info!("MMU online. Special regions:");
bsp::memory::mmu::virt_mem_layout().print_layout();
let (_, privilege_level) = exception::current_privilege_level();
info!("Current privilege level: {}", privilege_level);
info!("Exception handling state:");
exception::asynchronous::print_state();
info!(
"Architectural timer resolution: {} ns",
time::time_manager().resolution().as_nanos()
);
info!("Drivers loaded:");
for (i, driver) in bsp::driver::driver_manager()
.all_device_drivers()
.iter()
.enumerate()
{
info!(" {}. {}", i + 1, driver.compatible());
}
info!("Timer test, spinning for 1 second");
time::time_manager().spin_for(Duration::from_secs(1));
let remapped_uart = unsafe { bsp::device_driver::PL011Uart::new(0x1FFF_1000) };
writeln!(
remapped_uart,
"[ !!! ] Writing through the remapped UART at 0x1FFF_1000"
)
.unwrap();
info!("Echoing input now");
// Discard any spurious received characters before going into echo mode.
console().clear_rx();
loop {
let c = bsp::console::console().read_char();
bsp::console::console().write_char(c);
}
} | //! `interface` traits are provided *whenever possible* and *where it makes sense*. They are defined
//! in the respective subsystem module and help to enforce the idiom of *program to an interface,
//! not an implementation*. For example, there will be a common IRQ handling interface which the two
//! different interrupt controller `drivers` of both Raspberry Pis will implement, and only export the
cam.py | """Wrapper around two RPI and the multi channel switch.
It relies on the use of the Multi_Adapter_Board_2Channel_uc444 for switching camera via the I2C and GPIO control.
See https://github.com/ArduCAM/RaspberryPi/tree/master/Multi_Camera_Adapter/Multi_Adapter_Board_2Channel_uc444
"""
import time
import cv2 as cv
from threading import Thread, Event, Lock
from ..error import CameraNotFoundError
class BackgroundVideoCapture(object):
"""Wrapper on OpenCV VideoCapture object.
Args:
camera_index (int): index of the used camera (see OpenCV doc for details)
resolution (int, int): desired resolution for the grabbed frame (the resolution must be compatible with the driver)
    Unless lazy_setup is used, instantiating this object will automatically start the polling of images in the background.
    This wrapper is responsible for automatically polling images from the camera.
This ensures that we can always access the most recent image.
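
    Example (a minimal sketch; it assumes a camera is reachable at index 0 and that
    frames are returned as numpy arrays):

        cap = BackgroundVideoCapture(camera_index=0)
        ok, img = cap.read()
        if ok:
            print(img.shape)
        cap.close()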
"""
def __init__(self, camera_index, resolution=(600, 800), lazy_setup=True):
|
def _setup(self):
self.cap = cv.VideoCapture(self.camera_index)
if not self.cap.isOpened():
raise CameraNotFoundError(
message=f'Camera {self.camera_index} not found!',
camera_id=self.camera_index,
)
self.cap.set(cv.CAP_PROP_FOURCC, cv.VideoWriter_fourcc('M', 'J', 'P', 'G'))
self.cap.set(cv.CAP_PROP_FRAME_HEIGHT, self.resolution[0])
self.cap.set(cv.CAP_PROP_FRAME_WIDTH, self.resolution[1])
self._lock = Lock()
self.running = Event()
self._img = None
self._t = Thread(target=self._read_loop)
self._t.daemon = True
self._t.start()
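        # Wait up to ~5 seconds (50 x 0.1s) for the first frame to arrive before returning.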
for _ in range(50):
time.sleep(0.1)
if self._img is not None:
break
def close(self):
"""Stop polling image and release the Video Capture."""
self.running.clear()
if self._t.is_alive():
self._t.join()
self.cap.release()
def _read_loop(self):
self.running.set()
while self.running.is_set():
b, img = self.cap.read()
if b:
with self._lock:
self._img = img.copy()
def read(self):
"""Retrieve the last grabbed image."""
if not hasattr(self, 'cap'):
self._setup()
with self._lock:
return self._img is not None, self._img
| """Open video capture on the specified camera."""
self.camera_index = camera_index
self.resolution = resolution
if not lazy_setup:
self._setup() |