file_name
stringlengths 3
137
| prefix
stringlengths 0
918k
| suffix
stringlengths 0
962k
| middle
stringlengths 0
812k
|
---|---|---|---|
file_test.go | // Copyright 2017 Capsule8, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package functional
import (
"testing"
api "github.com/capsule8/capsule8/api/v0"
"github.com/golang/glog"
"github.com/golang/protobuf/ptypes/wrappers"
)
// fileTest drives an end-to-end file-open telemetry test: it builds and runs
// a test container and checks the resulting file events against test data.
type fileTest struct {
	// testContainer is the container image built/run for this test.
	testContainer *Container
	// openEvts maps filename -> expected file-open event; entries are
	// deleted as matching telemetry events are observed.
	openEvts map[string]*api.FileEvent
}
// newFileTest loads the expected file-open events from the test data file and
// returns a fileTest primed with them.
func newFileTest() (*fileTest, error) {
	openEvents, err := fileTestDataMap()
	if err != nil {
		return nil, err
	}
	return &fileTest{openEvts: openEvents}, nil
}
// BuildContainer builds the "file" test container image, records it on the
// receiver, and returns its image ID. On build failure it reports the error
// via t.Error and returns an empty string.
func (ft *fileTest) BuildContainer(t *testing.T) string {
	container := NewContainer(t, "file")
	if err := container.Build(); err != nil {
		t.Error(err)
		return ""
	}
	glog.V(1).Infof("Built container %s\n", container.ImageID[0:12])
	ft.testContainer = container
	return ft.testContainer.ImageID
}
// RunContainer starts the previously built test container, reporting any
// startup failure via t.Error.
func (ft *fileTest) RunContainer(t *testing.T) {
	if err := ft.testContainer.Run(); err != nil {
		t.Error(err)
		// Fix: previously execution fell through after a failed Run and
		// logged a misleading "Running container" success message. Return
		// early instead, mirroring BuildContainer's error handling.
		return
	}
	glog.V(1).Infof("Running container %s\n", ft.testContainer.ImageID[0:12])
}
// CreateSubscription builds a telemetry subscription containing one
// file-event filter per expected file-open event.
func (ft *fileTest) CreateSubscription(t *testing.T) *api.Subscription {
	filters := make([]*api.FileEventFilter, 0, len(ft.openEvts))
	for _, expected := range ft.openEvts {
		filters = append(filters, filterForTestData(expected))
	}
	return &api.Subscription{
		EventFilter: &api.EventFilter{
			FileEvents: filters,
		},
	}
}
// HandleTelemetryEvent checks an incoming telemetry event against the pending
// expected file-open events, removing each one once it has been seen. It
// returns true while events are still outstanding, keeping the test running.
func (ft *fileTest) HandleTelemetryEvent(t *testing.T, te *api.ReceivedTelemetryEvent) bool {
	if fileEvent, ok := te.Event.Event.(*api.TelemetryEvent_File); ok {
		if expected, pending := ft.openEvts[fileEvent.File.Filename]; pending {
			if !eventMatchFileTestData(fileEvent.File, expected) {
				t.Errorf("Expected %#v, got %#v\n", expected, fileEvent.File)
			}
			delete(ft.openEvts, fileEvent.File.Filename)
		}
	}
	glog.V(1).Infof("openEvts = %+v", ft.openEvts)
	return len(ft.openEvts) > 0
}
func filterForTestData(fe *api.FileEvent) *api.FileEventFilter |
//
// TestFile checks that the sensor generates file open events when requested by
// the subscription. The file file/testdata/filedata.txt specifies the test
// cases.
//
func TestFile(t *testing.T) {
	// Load the expected events; without them the test cannot proceed.
	test, err := newFileTest()
	if err != nil {
		t.Fatal(err)
	}
	NewTelemetryTester(test).RunTest(t)
}
| {
return &api.FileEventFilter{
Type: api.FileEventType_FILE_EVENT_TYPE_OPEN,
Filename: &wrappers.StringValue{Value: fe.Filename},
}
} |
0002_snowflake.py | # encoding: utf-8
from south.db import db
from south.v2 import SchemaMigration
class Migration(SchemaMigration):
def | (self, orm):
# Changing field 'TwitterAccount.social_id'
db.alter_column('twitter_twitteraccount', 'social_id', self.gf('django.db.models.fields.BigIntegerField')(unique=True))
def backwards(self, orm):
# Changing field 'TwitterAccount.social_id'
db.alter_column('twitter_twitteraccount', 'social_id', self.gf('django.db.models.fields.PositiveIntegerField')(unique=True))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'socialaccount.socialaccount': {
'Meta': {'object_name': 'SocialAccount'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'twitter.twitteraccount': {
'Meta': {'object_name': 'TwitterAccount', '_ormbases': ['socialaccount.SocialAccount']},
'profile_image_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'social_id': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True'}),
'socialaccount_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['socialaccount.SocialAccount']", 'unique': 'True', 'primary_key': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '15'})
},
'twitter.twitterapp': {
'Meta': {'object_name': 'TwitterApp'},
'access_token_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'authorize_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'consumer_key': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'consumer_secret': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'request_token_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"})
}
}
complete_apps = ['twitter']
| forwards |
x25519.rs | // MIT License
// Copyright (c) 2021-2022 The orion Developers
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//! # Parameters:
//! - `private_key`: The private key used in key agreement.
//! - `public_key`: The public key used in key agreement.
//!
//! # Errors:
//! An error will be returned if:
//! - The `key_agreement()` operation results in an all-zero output.
//!
//! # Security:
//! - Multiple different `private_key`/`public_key` pairs can produce the same shared key. Therefore,
//! using the resulting `SharedKey`, directly from `key_agreement()`, is not recommended. This is handled
//! automatically in [`orion::kex`].
//! - To securely generate a strong key, use [`PrivateKey::generate()`].
//!
//! # Recommendation:
//! - It is recommended to use [`orion::kex`] when possible.
//!
//! # Example:
//! ```rust
//! # #[cfg(feature = "safe_api")] {
//! use orion::hazardous::ecc::x25519::{PrivateKey, PublicKey, SharedKey, key_agreement};
//! use core::convert::TryFrom;
//!
//! // Alice generates a private key and computes the corresponding public key
//! let alice_sk = PrivateKey::generate();
//! let alice_pk = PublicKey::try_from(&alice_sk)?;
//!
//! // Bob does the same
//! let bob_sk = PrivateKey::generate();
//! let bob_pk = PublicKey::try_from(&bob_sk)?;
//!
//! // They both compute a shared key using the others public key
//! let alice_shared = key_agreement(&alice_sk, &bob_pk)?;
//! let bob_shared = key_agreement(&bob_sk, &alice_pk)?;
//!
//! assert_eq!(alice_shared, bob_shared);
//! # }
//! # Ok::<(), orion::errors::UnknownCryptoError>(())
//! ```
//! [`PrivateKey::generate()`]: crate::hazardous::ecc::x25519::PrivateKey::generate
//! [`orion::kex`]: crate::kex
use serde::{Serialize, Deserialize};
use crate::errors::UnknownCryptoError;
use crate::util::secure_cmp;
use core::ops::{Add, Mul, Sub};
/// Formally verified Curve25519 field arithmetic from: <https://github.com/mit-plv/fiat-crypto>.
use fiat_crypto::curve25519_64 as fiat_curve25519_u64;
/// The size of a public key used in X25519.
pub const PUBLIC_KEY_SIZE: usize = 32;
/// The size of a private key used in X25519.
pub const PRIVATE_KEY_SIZE: usize = 32;
/// The size of a shared key used in X25519.
pub const SHARED_KEY_SIZE: usize = 32;
/// u-coordinate of the base point.
const BASEPOINT: [u8; 32] = [
9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
];
/// The result of computing a shared secret with a low order point.
const LOW_ORDER_POINT_RESULT: [u8; 32] = [0u8; 32];
#[derive(Clone, Copy, Debug, Deserialize, Serialize)]
/// Represent an element in the curve field.
struct FieldElement([u64; 5]);
impl Eq for FieldElement {}

impl PartialEq for FieldElement {
    /// Constant-time equality on the canonical byte encoding, so field
    /// element comparisons do not leak secret data through timing.
    fn eq(&self, other: &Self) -> bool {
        use subtle::ConstantTimeEq;
        self.as_bytes().ct_eq(&other.as_bytes()).into()
    }
}
/// The function fiat_25519_carry_mul multiplies two field elements and reduces the result.
impl Mul for FieldElement {
    type Output = Self;

    /// Field multiplication with carry reduction (formally verified fiat code).
    fn mul(self, rhs: Self) -> Self::Output {
        use fiat_curve25519_u64::fiat_25519_carry_mul;
        let mut ret = [0u64; 5];
        fiat_25519_carry_mul(&mut ret, &self.0, &rhs.0);
        Self(ret)
    }
}
/// The function fiat_25519_add adds two field elements.
impl Add for FieldElement {
    type Output = Self;

    /// Field addition; fiat_25519_add's loose output is normalized by an
    /// explicit fiat_25519_carry pass.
    fn add(self, rhs: Self) -> Self::Output {
        use fiat_curve25519_u64::{fiat_25519_add, fiat_25519_carry};
        let mut ret = [0u64; 5];
        fiat_25519_add(&mut ret, &self.0, &rhs.0);
        let tmp = ret;
        fiat_25519_carry(&mut ret, &tmp);
        Self(ret)
    }
}
/// The function fiat_25519_sub subtracts two field elements.
impl Sub for FieldElement {
    type Output = Self;

    /// Field subtraction; fiat_25519_sub's loose output is normalized by an
    /// explicit fiat_25519_carry pass.
    fn sub(self, rhs: Self) -> Self::Output {
        use fiat_curve25519_u64::{fiat_25519_carry, fiat_25519_sub};
        let mut ret = [0u64; 5];
        fiat_25519_sub(&mut ret, &self.0, &rhs.0);
        let tmp = ret;
        fiat_25519_carry(&mut ret, &tmp);
        Self(ret)
    }
}
impl FieldElement {
    /// Create a `FieldElement` that is `0`.
    fn zero() -> Self {
        Self([0u64, 0u64, 0u64, 0u64, 0u64])
    }

    /// Create a `FieldElement` that is `1`.
    fn one() -> Self {
        Self([1u64, 0u64, 0u64, 0u64, 0u64])
    }

    /// Serialize the `FieldElement` as a byte-array.
    fn as_bytes(&self) -> [u8; 32] {
        // The function fiat_25519_to_bytes serializes a field element to bytes in little-endian order.
        use fiat_curve25519_u64::fiat_25519_to_bytes;
        let mut ret = [0u8; 32];
        fiat_25519_to_bytes(&mut ret, &self.0);
        ret
    }

    /// Deserialize the `FieldElement` from a byte-array in little-endian.
    ///
    /// Masks the MSB in the final byte of the input bytes.
    fn from_bytes(bytes: &[u8; 32]) -> Self {
        // The function fiat_25519_from_bytes deserializes a field element from bytes in little-endian order
        use fiat_curve25519_u64::fiat_25519_from_bytes;
        let mut temp = [0u8; 32];
        temp.copy_from_slice(bytes);
        temp[31] &= 127u8; // See RFC: "When receiving such an array, implementations of X25519
                           // (but not X448) MUST mask the most significant bit in the final byte."
        let mut ret = [0u64; 5];
        fiat_25519_from_bytes(&mut ret, &temp);
        Self(ret)
    }

    /// A conditional-swap operation.
    ///
    /// Exchanges `a` and `b` when `swap == 1`, leaves them unchanged when
    /// `swap == 0`, without branching on `swap` (fiat's multi-limb select).
    fn conditional_swap(swap: u8, a: &mut Self, b: &mut Self) {
        // The function fiat_25519_selectznz is a multi-limb conditional select.
        use fiat_curve25519_u64::fiat_25519_selectznz;
        // SAFETY: This is a part of fiat input bounds.
        debug_assert!(swap == 1 || swap == 0);
        let tmp_a = *a;
        let tmp_b = *b;
        fiat_25519_selectznz(&mut a.0, swap, &tmp_a.0, &tmp_b.0);
        fiat_25519_selectznz(&mut b.0, swap, &tmp_b.0, &tmp_a.0);
    }

    /// Square the `FieldElement` and reduce the result.
    fn square(&self) -> Self {
        use fiat_curve25519_u64::fiat_25519_carry_square;
        let mut ret = [0u64; 5];
        fiat_25519_carry_square(&mut ret, &self.0);
        Self(ret)
    }

    /// Multiply the `FieldElement` by 121666 and reduce the result.
    fn mul_121666(&self) -> Self {
        use fiat_curve25519_u64::fiat_25519_carry_scmul_121666;
        let mut ret = [0u64; 5];
        fiat_25519_carry_scmul_121666(&mut ret, &self.0);
        Self(ret)
    }

    /// Compute the multiplicative inverse of the `FieldElement`.
    ///
    /// Fixed square-and-multiply chain (exponentiation by p - 2, Fermat's
    /// little theorem); the exponent schedule follows the referenced Go
    /// implementation. The sequence of operations is data-independent.
    ///
    /// Ref: https://github.com/golang/crypto/blob/0c34fe9e7dc2486962ef9867e3edb3503537209f/curve25519/curve25519_generic.go#L718
    fn invert(&mut self) {
        let mut t0: FieldElement;
        let mut t1: FieldElement;
        let mut t2: FieldElement;
        let mut t3: FieldElement;
        t0 = self.square();
        t1 = t0.square();
        t1 = t1.square();
        t1 = *self * t1;
        t0 = t0 * t1;
        t2 = t0.square();
        t1 = t1 * t2;
        t2 = t1.square();
        for _ in 1..5 {
            t2 = t2.square();
        }
        t1 = t2 * t1;
        t2 = t1.square();
        for _ in 1..10 {
            t2 = t2.square();
        }
        t2 = t2 * t1;
        t3 = t2.square();
        for _ in 1..20 {
            t3 = t3.square();
        }
        t2 = t3 * t2;
        t2 = t2.square();
        for _ in 1..10 {
            t2 = t2.square();
        }
        t1 = t2 * t1;
        t2 = t1.square();
        for _ in 1..50 {
            t2 = t2.square();
        }
        t2 = t2 * t1;
        t3 = t2.square();
        for _ in 1..100 {
            t3 = t3.square();
        }
        t2 = t3 * t2;
        t2 = t2.square();
        for _ in 1..50 {
            t2 = t2.square();
        }
        t1 = t2 * t1;
        t1 = t1.square();
        for _ in 1..5 {
            t1 = t1.square();
        }
        *self = t1 * t0;
    }
}
#[derive(Clone, Deserialize, Serialize)]
/// Represents a Scalar decoded from a byte array.
struct Scalar([u8; PRIVATE_KEY_SIZE]);
impl Drop for Scalar {
    /// Zero the secret scalar bytes when the value goes out of scope.
    fn drop(&mut self) {
        use zeroize::Zeroize;
        self.0.iter_mut().zeroize();
    }
}
impl PartialEq for Scalar {
    /// Constant-time comparison, since scalars are secret key material.
    fn eq(&self, other: &Self) -> bool {
        use subtle::ConstantTimeEq;
        self.0.ct_eq(&other.0).into()
    }
}

impl Eq for Scalar {}
impl Scalar {
    /// Create a scalar from some byte-array.
    /// The scalar is clamped according to the RFC.
    ///
    /// Returns `UnknownCryptoError` if `slice` is not exactly 32 bytes.
    ///
    /// Ref: https://www.ietf.org/rfc/rfc7748.html#section-5
    fn from_slice(slice: &[u8]) -> Result<Scalar, UnknownCryptoError> {
        if slice.len() != PRIVATE_KEY_SIZE {
            return Err(UnknownCryptoError);
        }
        let mut ret = [0u8; PRIVATE_KEY_SIZE];
        ret.copy_from_slice(slice);
        // Clamp per RFC 7748: clear the three low bits (cofactor), clear the
        // top bit, and set bit 254.
        ret[0] &= 248;
        ret[31] &= 127;
        ret[31] |= 64;
        Ok(Self(ret))
    }
}
/// Scalar multiplication using the Montgomery Ladder (a.k.a "scalarmult")
///
/// Refs:
/// - https://eprint.iacr.org/2020/956.pdf
/// - https://eprint.iacr.org/2017/212.pdf
/// - https://github.com/golang/crypto/blob/0c34fe9e7dc2486962ef9867e3edb3503537209f/curve25519/curve25519_generic.go#L779
fn mont_ladder(scalar: &Scalar, point: FieldElement) -> FieldElement |
// NOTE: FieldElement contains a constant-time PartialEq<FieldElement> impl.
/// A type that represents a `PublicKey` that X25519 uses.
///
/// This type holds a field element and is used internally as the u-coordinate.
/// As the RFC mandates, the most significant bit of the last byte is masked.
///
/// # Errors:
/// An error will be returned if:
/// - `slice` is not 32 bytes.
#[derive(PartialEq, Debug, Clone, Deserialize, Serialize)]
pub struct PublicKey {
fe: FieldElement,
}
impl PartialEq<&[u8]> for PublicKey {
    /// Compare against a raw 32-byte u-coordinate.
    ///
    /// The slice is decoded with `FieldElement::from_bytes`, which masks the
    /// high bit of the final byte; the field element comparison itself runs
    /// in constant time. A slice of the wrong length compares unequal.
    fn eq(&self, other: &&[u8]) -> bool {
        use core::convert::TryInto;
        if other.len() != PUBLIC_KEY_SIZE {
            return false;
        }
        // unwrap(): length was checked above.
        let other: [u8; 32] = (*other).try_into().unwrap();
        self.fe == FieldElement::from_bytes(&other)
    }
}
impl From<[u8; PUBLIC_KEY_SIZE]> for PublicKey {
    #[inline]
    /// Decode a 32-byte array as the public key's u-coordinate; the MSB of
    /// the final byte is masked during decoding (RFC 7748).
    fn from(bytes: [u8; PUBLIC_KEY_SIZE]) -> Self {
        Self {
            fe: FieldElement::from_bytes(&bytes),
        }
    }
}
impl core::convert::TryFrom<&PrivateKey> for PublicKey {
    type Error = UnknownCryptoError;

    /// Derive the public key by multiplying the curve basepoint by the
    /// (clamped) private scalar.
    fn try_from(private_key: &PrivateKey) -> Result<Self, Self::Error> {
        // NOTE: This implementation should be identical to key_agreement() except
        // for the check of a resulting low order point result.
        let scalar = Scalar::from_slice(private_key.unprotected_as_bytes())?;
        Ok(PublicKey::from(
            mont_ladder(&scalar, FieldElement::from_bytes(&BASEPOINT)).as_bytes(),
        ))
    }
}
impl PublicKey {
    #[must_use = "SECURITY WARNING: Ignoring a Result can have real security implications."]
    /// Construct from a given byte slice.
    ///
    /// Returns `UnknownCryptoError` if `slice` is not exactly 32 bytes.
    pub fn from_slice(slice: &[u8]) -> Result<Self, UnknownCryptoError> {
        use core::convert::TryInto;
        let slice_len = slice.len();
        if slice_len != PUBLIC_KEY_SIZE {
            return Err(UnknownCryptoError);
        }
        Ok(Self {
            // unwrap(): length was checked above.
            fe: FieldElement::from_bytes(slice.try_into().unwrap()),
        })
    }

    #[inline]
    /// Return the length of the object.
    pub fn len(&self) -> usize {
        PUBLIC_KEY_SIZE
    }

    #[inline]
    /// Return `true` if this object does not hold any data, `false` otherwise.
    ///
    /// __NOTE__: This method should always return `false`, since there shouldn't be a way
    /// to create an empty instance of this object.
    pub fn is_empty(&self) -> bool {
        PUBLIC_KEY_SIZE == 0
    }

    #[inline]
    /// Convert this PublicKey to its byte-representation.
    pub fn to_bytes(&self) -> [u8; 32] {
        self.fe.as_bytes()
    }
}
// NOTE: Scalar contains a constant-time PartialEq<Scalar> impl.
// NOTE: All newtypes impl Drop by default and Scalar has zeroizing Drop
/// A type to represent the `PrivateKey` that X25519 uses.
///
/// This type holds a scalar and is used internally as such. The scalar held is decoded
/// (a.k.a "clamped") as mandated in the [RFC](https://datatracker.ietf.org/doc/html/rfc7748#section-5).
///
/// # Errors:
/// An error will be returned if:
/// - `slice` is not 32 bytes.
///
/// # Panics:
/// A panic will occur if:
/// - Failure to generate random bytes securely.
///
///
/// # Security:
/// - __**Avoid using**__ `unprotected_as_bytes()` whenever possible, as it breaks all protections
/// that the type implements.
///
/// - The trait `PartialEq<&'_ [u8]>` is implemented for this type so that users are not tempted
/// to call `unprotected_as_bytes` to compare this sensitive value to a byte slice. The trait
/// is implemented in such a way that the comparison happens in constant time. Thus, users should
/// prefer `SecretType == &[u8]` over `SecretType.unprotected_as_bytes() == &[u8]`.
/// Examples are shown below. The examples apply to any type that implements `PartialEq<&'_ [u8]>`.
/// ```rust
/// # #[cfg(feature = "safe_api")] {
/// use orion::hazardous::ecc::x25519::PrivateKey;
///
/// // Initialize a secret key with random bytes.
/// let secret_key = PrivateKey::generate();
///
/// // Secure, constant-time comparison with a byte slice
/// assert_ne!(secret_key, &[0; 32][..]);
///
/// // Secure, constant-time comparison with another SecretKey
/// assert_ne!(secret_key, PrivateKey::generate());
/// # }
/// # Ok::<(), orion::errors::UnknownCryptoError>(())
/// ```
#[derive(PartialEq, Deserialize, Serialize)]
pub struct PrivateKey {
scalar: Scalar,
}
impl PartialEq<&[u8]> for PrivateKey {
    /// Constant-time comparison with a byte slice.
    ///
    /// The slice is clamped by `Scalar::from_slice` before comparison, so the
    /// result is equality of the clamped scalars; a slice of the wrong length
    /// compares unequal.
    fn eq(&self, other: &&[u8]) -> bool {
        match Scalar::from_slice(*other) {
            Ok(other_scalar) => self.scalar == other_scalar,
            Err(_) => false,
        }
    }
}
impl core::fmt::Debug for PrivateKey {
    /// Never print key material; only the type name is shown.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(f, "{} {{***OMITTED***}}", stringify!(PrivateKey))
    }
}
impl From<[u8; PRIVATE_KEY_SIZE]> for PrivateKey {
    #[inline]
    /// Build a private key from a 32-byte array; the bytes are clamped per
    /// RFC 7748 by `Scalar::from_slice`.
    fn from(bytes: [u8; PRIVATE_KEY_SIZE]) -> Self {
        PrivateKey {
            // unwrap OK due to valid len
            scalar: Scalar::from_slice(bytes.as_ref()).unwrap(),
        }
    }
}
impl_try_from_trait!(PrivateKey);
// impl_serde_traits!(PrivateKey, to_bytes);
impl PrivateKey {
    #[must_use = "SECURITY WARNING: Ignoring a Result can have real security implications."]
    /// Construct from a given byte slice.
    ///
    /// Returns `UnknownCryptoError` if `slice` is not exactly 32 bytes.
    /// The bytes are clamped per RFC 7748.
    pub fn from_slice(slice: &[u8]) -> Result<Self, UnknownCryptoError> {
        Ok(Self {
            scalar: Scalar::from_slice(slice)?,
        })
    }

    #[inline]
    /// Return the length of the object.
    pub fn len(&self) -> usize {
        PRIVATE_KEY_SIZE
    }

    #[inline]
    /// Return `true` if this object does not hold any data, `false` otherwise.
    ///
    /// __NOTE__: This method should always return `false`, since there shouldn't be a way
    /// to create an empty instance of this object.
    pub fn is_empty(&self) -> bool {
        PRIVATE_KEY_SIZE == 0
    }

    #[inline]
    /// Return the object as byte slice. __**Warning**__: Should not be used unless strictly
    /// needed. This __**breaks protections**__ that the type implements.
    pub fn unprotected_as_bytes(&self) -> &[u8] {
        self.scalar.0.as_ref()
    }

    #[cfg(feature = "safe_api")]
    /// Randomly generate using a CSPRNG. Not available in `no_std` context.
    ///
    /// # Panics:
    /// Panics if secure random byte generation fails.
    pub fn generate() -> PrivateKey {
        let mut value = [0u8; PRIVATE_KEY_SIZE];
        crate::util::secure_rand_bytes(&mut value).unwrap();
        Self {
            // unwrap OK due to valid len
            scalar: Scalar::from_slice(&value).unwrap(),
        }
    }
}
construct_secret_key! {
/// A type to represent the `SharedKey` that X25519 produces.
///
/// This type simply holds bytes. Creating an instance from slices or similar,
/// performs no checks whatsoever.
///
/// # Errors:
/// An error will be returned if:
/// - `slice` is not 32 bytes.
(SharedKey, test_shared_key, SHARED_KEY_SIZE, SHARED_KEY_SIZE)
}
impl_from_trait!(SharedKey, SHARED_KEY_SIZE);
/// X25519 (Diffie-Hellman with Montgomery form of Curve25519).
///
/// Multiplies the peer's u-coordinate by the clamped private scalar. Returns
/// `UnknownCryptoError` if the result is all-zero, which happens when the
/// peer supplied a low-order point.
pub fn key_agreement(
    private_key: &PrivateKey,
    public_key: &PublicKey,
) -> Result<SharedKey, UnknownCryptoError> {
    let u_coord = public_key.fe;
    let field_element = mont_ladder(&private_key.scalar, u_coord).as_bytes();
    // High bit should be zero.
    debug_assert_eq!(field_element[31] & 0b1000_0000u8, 0u8);
    // Reject all-zero shared secrets; secure_cmp compares in constant time.
    if secure_cmp(&field_element, &LOW_ORDER_POINT_RESULT).is_ok() {
        return Err(UnknownCryptoError);
    }
    Ok(SharedKey::from(field_element))
}
#[cfg(test)]
mod public {
use crate::hazardous::ecc::x25519::{
key_agreement, PrivateKey, PublicKey, SharedKey, BASEPOINT,
};
#[test]
fn test_public_key_ignores_highbit() {
let u = [0u8; 32];
let mut msb_zero = u;
msb_zero[31] &= 127u8;
let mut msb_one = u;
msb_one[31] |= 128u8;
// These should equal each-other. The high bits differ, but should be ignored.
assert_eq!(PublicKey::from(msb_zero), msb_one.as_ref());
assert_eq!(PublicKey::from(msb_zero), PublicKey::from(msb_one));
}
#[test]
#[cfg(feature = "safe_api")]
fn test_highbit_ignored() {
// RFC 7748 dictates that the MSB of final byte must be masked when receiving a field element,
// used for agreement (public key). We check that modifying it does not impact the result of
// the agreement.
let k = PrivateKey::generate();
let mut u = [0u8; 32];
crate::util::secure_rand_bytes(&mut u).unwrap();
debug_assert_ne!(u[31] & 127u8, (u[31] & 127u8) | 128u8);
let mut u_msb_zero = u;
u_msb_zero[31] &= 127u8;
let mut u_msb_one = u;
u_msb_one[31] |= 128u8;
// Mask bit to 0 as we do in `FieldElement::from_bytes()`.
let msb_zero = key_agreement(&k, &PublicKey::from(u_msb_zero)).unwrap();
let msb_one = key_agreement(&k, &PublicKey::from(u_msb_one)).unwrap();
assert_eq!(msb_zero, msb_one);
}
#[test]
/// Ref: https://www.ietf.org/rfc/rfc7748.html#section-5.2
fn test_rfc_section_5() {
let mut scalar = [0u8; 32];
let mut point = [0u8; 32];
let mut expected = SharedKey::from([0u8; 32]);
hex::decode_to_slice(
"a546e36bf0527c9d3b16154b82465edd62144c0ac1fc5a18506a2244ba449ac4",
&mut scalar,
)
.unwrap();
hex::decode_to_slice(
"e6db6867583030db3594c1a424b15f7c726624ec26b3353b10a903a6d0ab1c4c",
&mut point,
)
.unwrap();
hex::decode_to_slice(
"c3da55379de9c6908e94ea4df28d084f32eccf03491c71f754b4075577a28552",
&mut expected.value,
)
.unwrap();
let actual = key_agreement(&PrivateKey::from(scalar), &PublicKey::from(point)).unwrap();
assert_eq!(actual, expected);
hex::decode_to_slice(
"4b66e9d4d1b4673c5ad22691957d6af5c11b6421e0ea01d42ca4169e7918ba0d",
&mut scalar,
)
.unwrap();
hex::decode_to_slice(
"e5210f12786811d3f4b7959d0538ae2c31dbe7106fc03c3efc4cd549c715a493",
&mut point,
)
.unwrap();
hex::decode_to_slice(
"95cbde9476e8907d7aade45cb4b873f88b595a68799fa152e6f8f7647aac7957",
&mut expected.value,
)
.unwrap();
let actual = key_agreement(&PrivateKey::from(scalar), &PublicKey::from(point)).unwrap();
assert_eq!(actual, expected);
}
#[test]
/// Ref: https://www.ietf.org/rfc/rfc7748.html#section-5.2
fn test_rfc_section_5_iter() {
let mut k = BASEPOINT;
let mut u = BASEPOINT;
// 1 iter
let ret = key_agreement(&PrivateKey::from(k), &PublicKey::from(u)).unwrap();
u = k;
k = ret.value;
let mut expected = SharedKey::from([0u8; 32]);
hex::decode_to_slice(
"422c8e7a6227d7bca1350b3e2bb7279f7897b87bb6854b783c60e80311ae3079",
&mut expected.value,
)
.unwrap();
assert_eq!(k, expected.value, "Failed after 1 iter");
for _ in 0..999 {
let ret = key_agreement(&PrivateKey::from(k), &PublicKey::from(u)).unwrap();
u = k;
k = ret.value;
}
hex::decode_to_slice(
"684cf59ba83309552800ef566f2f4d3c1c3887c49360e3875f2eb94d99532c51",
&mut expected.value,
)
.unwrap();
assert_eq!(k, expected.value, "Failed after 1.000 iter");
/* Taking a decade...
for num in 0..999000 {
let ret = key_agreement(&PrivateKey::from(k), &PublicKey::from(u)).unwrap();
u = k;
k = ret.value;
}
hex::decode_to_slice(
"7c3911e0ab2586fd864497297e575e6f3bc601c0883c30df5f4dd2d24f665424",
&mut expected.value,
)
.unwrap();
assert_eq!(k, expected.value, "Failed after 1.000.000 iter");
*/
}
#[test]
/// Ref: https://www.ietf.org/rfc/rfc7748.html#section-6.1
fn test_rfc_section_6_pub_priv_basepoint() {
let mut alice_pub = [0u8; 32];
let mut alice_priv = [0u8; 32];
let mut bob_pub = [0u8; 32];
let mut bob_priv = [0u8; 32];
let mut shared = SharedKey::from([0u8; 32]);
hex::decode_to_slice(
"77076d0a7318a57d3c16c17251b26645df4c2f87ebc0992ab177fba51db92c2a",
&mut alice_priv,
)
.unwrap();
hex::decode_to_slice(
"8520f0098930a754748b7ddcb43ef75a0dbf3a0d26381af4eba4a98eaa9b4e6a",
&mut alice_pub,
)
.unwrap();
assert_eq!(
key_agreement(&PrivateKey::from(alice_priv), &PublicKey::from(BASEPOINT)).unwrap(),
PublicKey::from(alice_pub).to_bytes().as_ref()
);
hex::decode_to_slice(
"5dab087e624a8a4b79e17f8b83800ee66f3bb1292618b6fd1c2f8b27ff88e0eb",
&mut bob_priv,
)
.unwrap();
hex::decode_to_slice(
"de9edb7d7b7dc1b4d35b61c2ece435373f8343c85b78674dadfc7e146f882b4f",
&mut bob_pub,
)
.unwrap();
assert_eq!(
key_agreement(&PrivateKey::from(bob_priv), &PublicKey::from(BASEPOINT)).unwrap(),
PublicKey::from(bob_pub).to_bytes().as_ref()
);
hex::decode_to_slice(
"4a5d9d5ba4ce2de1728e3bf480350f25e07e21c947d19e3376f09b3c1e161742",
&mut shared.value,
)
.unwrap();
assert_eq!(
key_agreement(&PrivateKey::from(alice_priv), &PublicKey::from(bob_pub)).unwrap(),
shared.value.as_ref()
);
assert_eq!(
key_agreement(&PrivateKey::from(bob_priv), &PublicKey::from(alice_pub)).unwrap(),
shared.value.as_ref()
);
}
}
| {
let x1 = point;
let mut x2 = FieldElement::one();
let mut x3 = x1;
let mut z3 = FieldElement::one();
let mut z2 = FieldElement::zero();
let mut tmp0: FieldElement;
let mut tmp1: FieldElement;
let mut swap: u8 = 0;
for idx in (0..=254).rev() {
let bit = (scalar.0[idx >> 3] >> (idx & 7)) & 1;
swap ^= bit;
FieldElement::conditional_swap(swap, &mut x2, &mut x3);
FieldElement::conditional_swap(swap, &mut z2, &mut z3);
swap = bit;
tmp0 = x3 - z3;
tmp1 = x2 - z2;
x2 = x2 + z2;
z2 = x3 + z3;
z3 = tmp0 * x2;
z2 = z2 * tmp1;
tmp0 = tmp1.square();
tmp1 = x2.square();
x3 = z3 + z2;
z2 = z3 - z2;
x2 = tmp1 * tmp0;
tmp1 = tmp1 - tmp0;
z2 = z2.square();
z3 = tmp1.mul_121666();
x3 = x3.square();
tmp0 = tmp0 + z3;
z3 = x1 * z2;
z2 = tmp1 * tmp0;
}
FieldElement::conditional_swap(swap, &mut x2, &mut x3);
FieldElement::conditional_swap(swap, &mut z2, &mut z3);
z2.invert();
x2 = x2 * z2;
x2
} |
images.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import re
from .common import fact, _copyB
from telegram_util import matchKey
def isCaption(item):
    # A tag counts as a caption when its attributes mention "caption"
    # and the tag itself is not an <img>.
    if not item:
        return False
    attrs_text = str(item.attrs).lower()
    return 'caption' in attrs_text and item.name != 'img'
def _getCaption(item):
    # Prefer the tag immediately following the item if it looks like a caption.
    nxt = item.find_next()
    if isCaption(nxt):
        return nxt
    # Otherwise scan the parent's descendants for a caption-like, non-img tag.
    parent = item.parent
    if not parent:
        return
    for candidate in parent.find_all():
        if 'caption' in str(candidate.attrs).lower() and candidate.name != 'img':
            return candidate
def _formatImgUrl(raw, domain):
    # Normalize an image URL: bump size hints up to 1300px where the host's
    # URL scheme allows it, then make the URL absolute.
    # NOTE(review): the host-specific keywords ('guim' = Guardian images,
    # 'condecdn', 'twreport') are assumed CDN markers — verify against callers.
    parts = raw.split('/')
    success = False
    # Look for a ".../max/<number>/..." path segment (size-constrained CDNs).
    for index, part in enumerate(parts):
        if part == 'max':
            try:
                if int(parts[index + 1]) > 0:
                    success = True
                    break
            except:
                pass
    # `index` is only meaningful here when `success` is True (loop broke).
    if success and 'guim' not in raw:
        parts[index + 1] = '1300'
        raw = '/'.join(parts)
    # Rewrite query-style width hints, except for Guardian image URLs.
    if not matchKey(raw, ['guim']):
        raw = re.sub('width=\d\d*', 'width=1300', raw)
    if matchKey(raw, ['condecdn']):
        raw = re.sub('/\d\d*/', '/1300/', raw)
    # twreport serves "-tiny" thumbnails with a "-desktop" full-size variant.
    if matchKey(raw, ['twreport']) and matchKey(raw, ['-tiny']):
        raw = raw.replace('-tiny', '-desktop')
    # Make protocol-relative and site-relative URLs absolute.
    if raw.startswith('//'):
        return 'https:' + raw
    if raw.startswith('/'):
        return domain + raw
    return raw
MORE_CERTAIN_IMG_ATTRS = ['data-src-large', 'data-src', 'data-original-src', 'data-pin-media']
IMG_ATTRS = MORE_CERTAIN_IMG_ATTRS + ['src'] # people would put junk in src field
def formatRawImg(raw_img, domain):
|
def _getImgInsideFigure(figure, domain):
    # Return the first descendant that converts into a usable <img> tag.
    for candidate in figure.find_all():
        converted = formatRawImg(candidate, domain)
        if converted:
            return converted
    # No usable image found: remove the whole figure from the tree.
    figure.decompose()
def _cleanupFigure(figure, domain):
    # Rebuild a <figure> into a minimal img + figcaption pair.
    # Returns the new figure, or None when no usable image was found.
    # Embedded media: keep just the iframe with a normalized src.
    iframe = figure.find('iframe')
    if iframe and iframe.get('src'):
        r = fact().new_tag("iframe", src = _formatImgUrl(iframe['src'], domain))
        iframe.replace_with(r)
        return figure
    img = _getImgInsideFigure(figure, domain)
    if not img:
        return
    caption = figure.find('figcaption')
    # Fall back to the image's title attribute as the caption text.
    if not caption and img.get('title'):
        caption = fact().new_tag("figcaption")
        caption.append(img['title'])
    new_figure = fact().new_tag("figure")
    new_figure.append(img)
    if caption:
        new_figure.append(caption)
    # Drop citation tags from the rebuilt figure.
    for cite in new_figure.find_all('cite'):
        cite.decompose()
    return new_figure
def _findnoscriptImg(img):
if not img.parent or len(str(img.parent)) > 1000 or \
len(list(img.parent.find_all('img'))) > 2:
return
if img.attrs and set(MORE_CERTAIN_IMG_ATTRS).intersection(set(img.attrs.keys())):
return
noscript = img.parent.find('noscript')
if not noscript:
return
return noscript.find('img')
def _yieldPossibleImg(soup):
possibles = [
soup.find_all("div", class_="js-delayed-image-load"),
soup.find_all("figure"),
soup.find_all("img"),
]
for l in possibles:
for x in l:
yield x
def _cleanupImages(soup, domain):
for img in soup.find_all("div", class_="js-delayed-image-load"):
img.name = 'img'
for img in soup.find_all('img'):
noscript_img = _findnoscriptImg(img)
if noscript_img:
img.replace_with(noscript_img)
for figure in soup.find_all('figure'):
try:
raw_img = figure.find('noscript').find('img')
figure.append(raw_img)
except:
...
for item in soup.find_all('noscript'):
item.decompose()
for figure in soup.find_all('figure'):
r = _cleanupFigure(figure, domain)
if r:
figure.replace_with(r)
else:
figure.decompose()
for img in soup.find_all('img'):
if img.parent and img.parent.name == 'figure':
continue
if not img.parent:
img.decompose()
continue
raw_caption = _getCaption(img)
if raw_caption:
caption = fact().new_tag("figcaption")
caption.append(_copyB(raw_caption))
figure = fact().new_tag("figure")
figure.append(_copyB(img))
if raw_caption:
figure.append(caption)
raw_caption.decompose()
r = _cleanupFigure(figure, domain)
if len(img.text) > 500: # the sixth tone have a strange background img formatting
continue
if r:
img.replace_with(r)
else:
img.decompose()
return soup | for attr in IMG_ATTRS:
if raw_img.get(attr) and not raw_img.get(attr).endswith('pdf'):
r = fact().new_tag("img", src = _formatImgUrl(raw_img[attr], domain))
if raw_img.get('title'):
r['title'] = raw_img.get('title')
return r |
aa_manager.py | """
Script for running American Airlines scraper directly from command line.
**Requirements**:
- Selenium
- Geckodriver
- Firefox web browser
- bs4 (BeautifulSoup)
Default name for file with search queries -
'search_tasks.json'(default location - script directory).
'search_tasks.json'(and similar files) must have following format:
[
{"departure": "departure_string(airport_code or state or city)",
"destination": "destination_string(airport_code or state or city)",
"date": "departure_date(mm/dd/yyyy)",
"return_date": "return_date_string"},
{"another search task and so on"}, {}, {}, ...
]
So, what kind of search you can perform and what you will get as a result?
For example, you can find all flights from Los Angeles to San Francisco that depart on 03/21/2018 just typing next:
aa_manager.py args LAX SFO 03/21/2018
(Take note of date format: mm/dd/yyyy).Entire search results will be placed in newly generated .json file(default
location of the file - script execution folder) with auto generated name like this one: 'LAX_SFO2018-03-21-220649.json'
(First 3 letters - departure airport code, second 3 letters - destination airport code, then follows a date of the
flight and timestump, which represent file's creation time: -HHMMSS).
Ok, lets look inside:
{
"depart": "03-21-2018 21:20:00",
"arrive": "03-21-2018 22:54:00",
"stops": "Nonstop",
"price": "46.00",
"details": [
{
"number": "AA 6039",
"airplane": "E75-Embraer RJ-175"
}
]
}
Values keys speaks for themselves, didn't they? Well, only "price" key need a bit of explanation: this key shows
lowest price for selected flight. Also, if you see "N/A" - that's probably mean you need to buy ticket directly
at airport or searched class("basic economy", "main cabin" etc) not available for most of the flights.
WELL, WOW. But one more example: let's say - you wanna know list of all flight from all Alabama state airports
to San Francisco? Not a problem - just type a state name instead of specific airport code.(Better to use
parallel execution for this type of task):
aa_manager.py -sp args Alabama SFO 03/21/2018
And several .json files (one for each Alabama airport) will be saved into execution folder.
Here is some **help information**:
usage: aa_manager.py [-h] [-sp | -ss] {run,args} ...
positional arguments:
{run,args}
run Execute search tasks from a file (default method - serial)
args Enter search parameters from command line and run
search(default execution method - serial)
optional arguments:
-h, --help show this help message and exit
-sp, --parallel Perform parallel search
-ss, --serial Perform search, using serial execution(tusks executed 'one-
by-one'). It's a default method.
**run**:
usage: aa_manager.py run [-h] [-f FILE_NAME]
-f FILE_NAME, --file FILE_NAME
File name (and full path, if needed) to the file with
search tasks
**args**:
usage: aa_manager.py args [-h] departure_airport destination_airport departure_date [return_date]
positional arguments:
departure_airport Departure airport's code(or departure city/state - in
this case list of all airports in the city/state will
be formed)
destination_airport Destination airport's code(or destination city/state - in
this case list of all airports in the city/state will be
formed)
departure_date Departure date
return_date Return date(optional). Enter this parameter only for
round trips
"""
import json
import re
import datetime
import argparse
from multiprocessing import Pool
from american_airlines import AmericanAirlines
AIRPORTS_CODES = "airports.json" # this file contain all available for search airports codes
NUM_PROCESSES = 4 # default number of processes for parallel execution
SEARCH_TASKS = "search_tasks.json" # default name for .json file with search queries
def get_airports_codes(airports_file):
"""Loading airports codes, city and state names form .json file(default: AIRPORTS_CODES)"""
with open(airports_file, 'r') as file:
text = file.read()
# yeh, 'airports.json' doesnt have proper format, so we just making some transformations
new_text = re.sub(r'}{', '},{', text)
new_text = re.sub(r"^{", '[{', new_text)
new_text = re.sub(r'}$', '}]', new_text)
return json.loads(new_text)
def get_search_tasks(tasks_file):
""" Loading search queries from .json file(default - 'search_tasks.json')"""
with open(tasks_file, 'r') as file:
return json.load(file)
def validate_airport_name(airports_list, airport_name):
""" Here we compering entered airport name to names from AIRPORTS_CODES.
Three types of names are permitted: airport code, city name(where airport located), state name.
"""
if not isinstance(airport_name, str):
raise TypeError("Airport name is not a String!")
for airport in airports_list:
if airport['code'].lower() == airport_name.lower():
return 'code'
if (airport['city']).lower() == airport_name.lower():
return 'city'
if (airport['state']).lower() == airport_name.lower():
return 'state'
return 'none' # no matches found
def validate_date_string(date_string):
"""Date string must have following format: dd/dd/dddd (where d - single digit from 0 to 9)"""
return re.fullmatch(r'^\d{2}/\d{2}/\d{4}$', date_string)
def transform_string_to_date(date_string):
"""Transforming date string to datetime.date format. Check data with 'validate_date_string' first"""
month, day, year = date_string.split("/")
return datetime.date(year=int(year), month=int(month), day=int(day))
def check_dates(task_dictionary):
""" Validating departure and return dates. Note that :param task_dictionary: must have proper form
Main usage - inside 'check_and_quantize_tasks' function.
"""
current_date = datetime.date.today()
departure_date_string = task_dictionary['date'] # existing of this field checked in 'check_and_quantize_tasks'
# checking string date to be formatted like mm/dd/yyyy
if not validate_date_string(departure_date_string):
raise ValueError("Departure date string has inappropriate format!")
# if date_string has something like this: 34/34/2018 - ValueError will be raised
departure_date = transform_string_to_date(departure_date_string)
# checking for departure date not from the past
if current_date > departure_date:
raise ValueError("Departure date must be today or in the future(no past dates allowed)")
try:
return_date_string = task_dictionary['return_date']
if not validate_date_string(return_date_string):
raise ValueError("Return date string has inappropriate format!")
return_date = transform_string_to_date(return_date_string)
if departure_date > return_date:
raise ValueError("Return date must be at same day or after departure date!")
except KeyError:
pass # if no return_date - than it's 'one way' trip
def airports_codes_from_city(name, airports_list, airport_type):
"""
Here we finding all airports(their codes) in city or state.
:param name: name of airport we gonna check
:param airports_list: list of all airports
:param airport_type: type of :param name: - 'code', 'city', 'state'
:return: list of airports codes
"""
temp = []
for airport in airports_list:
if name.lower() == airport[airport_type].lower():
temp.append(airport['code'])
return temp
def check_and_quantize_tasks(tasks_dictionaries, airports_list):
"""
Here we perform 'quantization' of search queries to the form which can be
executed inside 'execute_single_crawler' function. Each task will be a list, that contains:
departure airport code, destination airport code, departure date, return date('None' for 'one way' trip),
trip type('one way' or 'round trip').
:param tasks_dictionaries: list of dictionaries (Our search queries).
:param airports_list: return result from 'get_airports_codes' function
:return: list of lists.
"""
tasks_list = [] # returns list of lists
for dictionary in tasks_dictionaries:
return_date = None
# validating 'departure' key
try:
departure_airport = dictionary['departure']
airport_type = validate_airport_name(airports_list, departure_airport)
if airport_type == "none":
print("Invalid airport name")
continue
departure_codes = airports_codes_from_city(departure_airport, airports_list, airport_type)
except (KeyError, ValueError) as e:
print(e.__str__())
continue # skip this query dictionary if there is invalid data
# validating destination key
try:
destination_airport = dictionary['destination']
airport_type = validate_airport_name(airports_list, destination_airport)
if airport_type == "none":
print("Invalid airport name")
continue
destination_codes = airports_codes_from_city(destination_airport, airports_list, airport_type)
except (KeyError, ValueError) as e:
print(e.__str__())
continue
# validating 'date'
try:
departure_date = dictionary['date']
check_dates(dictionary) # here we also checking 'return_date' if present
except (KeyError, ValueError) as e:
print(e.__str__())
continue
# validating "return_date" (we don't always need a return date)
try:
return_date = dictionary['return_date']
except KeyError:
pass
# forming tasks (list of lists)
for dep_airport in departure_codes: # from each airport in departure list
for dest_airport in destination_codes: # to every single airport in destination list
if return_date is None:
trip_type = "one way"
else:
trip_type = 'round trip'
# that is actually our 'quantized' task
temp = [dep_airport, dest_airport, departure_date, return_date, trip_type]
tasks_list.append(temp)
# if we cant identify even a single task - something wrong with input data
if not tasks_list:
raise ValueError('No tusks for execution found. Check input format!')
return tasks_list
def execute_single_crawler(list_of_arguments):
"""This function create and execute single instance of AmericanAirlines() class"""
crawler = AmericanAirlines(departure_airport=list_of_arguments[0], destination_airport=list_of_arguments[1],
departure_date=list_of_arguments[2], return_date=list_of_arguments[3],
trip_type=list_of_arguments[4])
crawler.run()
def serial_execution(tasks_list):
for task in tasks_list:
execute_single_crawler(task)
def multiprocesses_execution(tasks_list):
|
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Getting flights information (depart, arrive, number of stops, "
"lowest price etc) from American Airlines web site search form "
"using user-provided(from file or command line) search parameters")
# ok, parallel or serial types of execution should exclude each other
group1 = parser.add_mutually_exclusive_group()
# command for parallel execution
group1.add_argument('-sp', '--parallel',
help="Perform parallel search",
const="parallel",
action='store_const',
dest='execution_method')
# command for serial execution
group1.add_argument('-ss', '--serial',
help="Perform search, using serial execution(tusks executed 'one-by-one'). "
"It's a default method.",
const="serial",
action='store_const',
dest='execution_method')
# Serial execution will be default method, if parallel not mentioned explicitly
parser.set_defaults(execution_method='serial')
# creating 2 subparsers(run and args) with name 'subcommand' (parser.pars_args().subcommand - name of subparser)
subparsers = parser.add_subparsers(dest="subcommand")
# parser_a will get tasks list from a file
parser_a = subparsers.add_parser('run', help="Execute search tasks from a file (default method - serial)")
# command for loading tasks from file (default 'search_tasks.json')
parser_a.add_argument('-f', '--file',
help="File name (and full path, if needed) to the file with search tasks.",
default=SEARCH_TASKS,
action='store',
dest='file_name')
# parser_b will accept search parameters from command line
parser_b = subparsers.add_parser('args',
help="Enter search parameters from command line "
"and run search(default execution method - serial)")
# entering departure airport name
parser_b.add_argument('departure_airport',
help="Departure airport's code(or departure city/state - in this case list "
"of all airports in the city/state will be formed)",
action='store',
)
# entering destination airport name/code
parser_b.add_argument('destination_airport',
help="Destination airport's code(or departure city/state - in this case list"
"of all airports in the city/state will be formed)",
action='store',
)
# entering departure date
parser_b.add_argument('departure_date',
help="Departure date",
action='store',
)
# entering return date(optionally):
parser_b.add_argument('return_date',
help="Return date(optional). Enter this parameter only for round trips",
nargs='?',
action='store',
)
# getting our arguments
args = parser.parse_args()
list_of_airports = get_airports_codes(AIRPORTS_CODES)
# ok, here is block for 'file execution' logic
if args.subcommand == 'run':
print("Starting {} execution of search commands from file: '{}'".format(args.execution_method, args.file_name))
# Opening and loading file with search tasks
potential_tasks = get_search_tasks(args.file_name)
# Validating data and forming commands from search tasks:
list_of_tasks = check_and_quantize_tasks(potential_tasks, list_of_airports)
# this is block for "command line" search arguments logic
elif args.subcommand == 'args':
print("Staring {} execution of provided search command...".format(args.execution_method))
# forming dictionary with potential search parameters
search_dict = [{'departure': args.departure_airport,
'destination': args.destination_airport,
'date': args.departure_date}]
if args.return_date is not None:
search_dict[0]['return_date'] = args.return_date
list_of_tasks = check_and_quantize_tasks(search_dict, list_of_airports)
if args.execution_method == 'serial':
serial_execution(list_of_tasks)
elif args.execution_method == 'parallel':
multiprocesses_execution(list_of_tasks)
print("All jobs done!")
| with Pool(processes=NUM_PROCESSES) as pool:
pool.map(execute_single_crawler, tasks_list) |
arg.go | package main
import (
"fmt"
"go/ast"
"go/token"
"log"
"strconv"
"github.com/mewkiz/pkg/errutil"
"golang.org/x/arch/x86/x86asm"
)
// getArg converts arg into a corresponding Go expression.
func getArg(arg x86asm.Arg) ast.Expr {
switch arg := arg.(type) {
case x86asm.Reg:
return getReg(arg)
case x86asm.Mem:
return getMem(arg)
case x86asm.Imm:
return createExpr(int64(arg))
case x86asm.Rel:
// TODO: Implement support for relative addresses.
}
fmt.Printf("%#v\n", arg)
log.Fatal(errutil.Newf("support for type %T not yet implemented", arg))
panic("unreachable")
}
// regs maps register names to their corresponding Go identifiers.
var regs = map[string]*ast.Ident{
// 8-bit
"AL": ast.NewIdent("al"),
"CL": ast.NewIdent("cl"),
"DL": ast.NewIdent("dl"),
"BL": ast.NewIdent("bl"),
"AH": ast.NewIdent("ah"),
"CH": ast.NewIdent("ch"),
"DH": ast.NewIdent("dh"),
"BH": ast.NewIdent("bh"),
"SPB": ast.NewIdent("spb"),
"BPB": ast.NewIdent("bpb"),
"SIB": ast.NewIdent("sib"),
"DIB": ast.NewIdent("dib"),
"R8B": ast.NewIdent("r8b"),
"R9B": ast.NewIdent("r9b"),
"R10B": ast.NewIdent("r10b"),
"R11B": ast.NewIdent("r11b"),
"R12B": ast.NewIdent("r12b"),
"R13B": ast.NewIdent("r13b"),
"R14B": ast.NewIdent("r14b"),
"R15B": ast.NewIdent("r15b"),
// 16-bit
"AX": ast.NewIdent("ax"),
"CX": ast.NewIdent("cx"),
"DX": ast.NewIdent("dx"),
"BX": ast.NewIdent("bx"),
"SP": ast.NewIdent("sp"),
"BP": ast.NewIdent("bp"),
"SI": ast.NewIdent("si"),
"DI": ast.NewIdent("di"),
"R8W": ast.NewIdent("r8w"),
"R9W": ast.NewIdent("r9w"),
"R10W": ast.NewIdent("r10w"),
"R11W": ast.NewIdent("r11w"),
"R12W": ast.NewIdent("r12w"),
"R13W": ast.NewIdent("r13w"),
"R14W": ast.NewIdent("r14w"),
"R15W": ast.NewIdent("r15w"),
// 32-bit
"EAX": ast.NewIdent("eax"),
"ECX": ast.NewIdent("ecx"),
"EDX": ast.NewIdent("edx"),
"EBX": ast.NewIdent("ebx"),
"ESP": ast.NewIdent("esp"),
"EBP": ast.NewIdent("ebp"),
"ESI": ast.NewIdent("esi"),
"EDI": ast.NewIdent("edi"),
"R8L": ast.NewIdent("r8l"),
"R9L": ast.NewIdent("r9l"),
"R10L": ast.NewIdent("r10l"),
"R11L": ast.NewIdent("r11l"),
"R12L": ast.NewIdent("r12l"),
"R13L": ast.NewIdent("r13l"),
"R14L": ast.NewIdent("r14l"),
"R15L": ast.NewIdent("r15l"),
// 64-bit
"RAX": ast.NewIdent("rax"),
"RCX": ast.NewIdent("rcx"),
"RDX": ast.NewIdent("rdx"),
"RBX": ast.NewIdent("rbx"),
"RSP": ast.NewIdent("rsp"),
"RBP": ast.NewIdent("rbp"),
"RSI": ast.NewIdent("rsi"),
"RDI": ast.NewIdent("rdi"),
"R8": ast.NewIdent("r8"),
"R9": ast.NewIdent("r9"),
"R10": ast.NewIdent("r10"),
"R11": ast.NewIdent("r11"),
"R12": ast.NewIdent("r12"),
"R13": ast.NewIdent("r13"),
"R14": ast.NewIdent("r14"),
"R15": ast.NewIdent("r15"),
// Instruction pointer.
"IP": ast.NewIdent("ip"), // 16-bit
"EIP": ast.NewIdent("eip"), // 32-bit
"RIP": ast.NewIdent("rip"), // 64-bit
// 387 floating point registers.
"F0": ast.NewIdent("f0"),
"F1": ast.NewIdent("f1"),
"F2": ast.NewIdent("f2"),
"F3": ast.NewIdent("f3"),
"F4": ast.NewIdent("f4"),
"F5": ast.NewIdent("f5"),
"F6": ast.NewIdent("f6"),
"F7": ast.NewIdent("f7"),
// MMX registers.
"M0": ast.NewIdent("m0"),
"M1": ast.NewIdent("m1"),
"M2": ast.NewIdent("m2"),
"M3": ast.NewIdent("m3"),
"M4": ast.NewIdent("m4"),
"M5": ast.NewIdent("m5"),
"M6": ast.NewIdent("m6"),
"M7": ast.NewIdent("m7"),
// XMM registers.
"X0": ast.NewIdent("x0"),
"X1": ast.NewIdent("x1"),
"X2": ast.NewIdent("x2"),
"X3": ast.NewIdent("x3"),
"X4": ast.NewIdent("x4"),
"X5": ast.NewIdent("x5"),
"X6": ast.NewIdent("x6"),
"X7": ast.NewIdent("x7"),
"X8": ast.NewIdent("x8"),
"X9": ast.NewIdent("x9"),
"X10": ast.NewIdent("x10"),
"X11": ast.NewIdent("x11"),
"X12": ast.NewIdent("x12"),
"X13": ast.NewIdent("x13"),
"X14": ast.NewIdent("x14"),
"X15": ast.NewIdent("x15"),
// Segment registers.
"ES": ast.NewIdent("es"),
"CS": ast.NewIdent("cs"),
"SS": ast.NewIdent("ss"),
"DS": ast.NewIdent("ds"),
"FS": ast.NewIdent("fs"),
"GS": ast.NewIdent("gs"),
// System registers.
"GDTR": ast.NewIdent("gdtr"),
"IDTR": ast.NewIdent("idtr"),
"LDTR": ast.NewIdent("ldtr"),
"MSW": ast.NewIdent("msw"),
"TASK": ast.NewIdent("task"),
// Control registers.
"CR0": ast.NewIdent("cr0"),
"CR1": ast.NewIdent("cr1"),
"CR2": ast.NewIdent("cr2"),
"CR3": ast.NewIdent("cr3"),
"CR4": ast.NewIdent("cr4"),
"CR5": ast.NewIdent("cr5"),
"CR6": ast.NewIdent("cr6"),
"CR7": ast.NewIdent("cr7"),
"CR8": ast.NewIdent("cr8"),
"CR9": ast.NewIdent("cr9"),
"CR10": ast.NewIdent("cr10"),
"CR11": ast.NewIdent("cr11"),
"CR12": ast.NewIdent("cr12"),
"CR13": ast.NewIdent("cr13"),
"CR14": ast.NewIdent("cr14"),
"CR15": ast.NewIdent("cr15"),
// Debug registers.
"DR0": ast.NewIdent("dr0"),
"DR1": ast.NewIdent("dr1"),
"DR2": ast.NewIdent("dr2"),
"DR3": ast.NewIdent("dr3"),
"DR4": ast.NewIdent("dr4"),
"DR5": ast.NewIdent("dr5"),
"DR6": ast.NewIdent("dr6"),
"DR7": ast.NewIdent("dr7"),
"DR8": ast.NewIdent("dr8"),
"DR9": ast.NewIdent("dr9"),
"DR10": ast.NewIdent("dr10"),
"DR11": ast.NewIdent("dr11"),
"DR12": ast.NewIdent("dr12"),
"DR13": ast.NewIdent("dr13"),
"DR14": ast.NewIdent("dr14"),
"DR15": ast.NewIdent("dr15"),
// Task registers.
"TR0": ast.NewIdent("tr0"),
"TR1": ast.NewIdent("tr1"),
"TR2": ast.NewIdent("tr2"),
"TR3": ast.NewIdent("tr3"),
"TR4": ast.NewIdent("tr4"),
"TR5": ast.NewIdent("tr5"),
"TR6": ast.NewIdent("tr6"),
"TR7": ast.NewIdent("tr7"),
}
// getReg converts reg into a corresponding Go expression.
func getReg(reg x86asm.Reg) ast.Expr {
return getRegFromString(reg.String())
}
// getRegFromString converts reg into a corresponding Go expression.
func getRegFromString(reg string) ast.Expr {
if expr, ok := regs[reg]; ok {
return expr
}
log.Fatal(errutil.Newf("unable to lookup identifer for register %q", reg))
panic("unreachable")
}
// getMem converts mem into a corresponding Go expression.
func getMem(mem x86asm.Mem) ast.Expr {
// TODO: Replace 1*x with x in Scale*Index.
// The general memory reference form is:
// Segment:[Base+Scale*Index+Disp]
// ... + Disp
expr := &ast.BinaryExpr{}
if mem.Disp != 0 {
disp := createExpr(mem.Disp)
expr.Op = token.ADD
expr.Y = disp
}
// ... + (Scale*Index) + ...
if mem.Scale != 0 && mem.Index != 0 {
scale := createExpr(mem.Scale)
index := getReg(mem.Index)
product := &ast.BinaryExpr{
X: scale,
Op: token.MUL,
Y: index,
}
switch {
case expr.Y == nil:
// ... + (Scale*Index)
expr.Op = token.ADD
expr.Y = product
default:
// ... + (Scale*Index) + Disp
expr.X = product
expr.Op = token.ADD
}
}
// ... + Base + ...
if mem.Base != 0 {
base := getReg(mem.Base)
switch {
case expr.X == nil:
// Base + (Scale*Index)
// or
// Base + Disp
expr.X = base
expr.Op = token.ADD
case expr.Y == nil:
// ... + Base
expr.Op = token.ADD
expr.Y = base
default:
sum := &ast.BinaryExpr{
X: expr.X,
Op: token.ADD,
Y: expr.Y,
}
expr.X = base
expr.Op = token.ADD
expr.Y = sum
}
}
// TODO: Figure out how the calculation is affected by segment in:
// Segment:[Base+Scale*Index+Disp]
if mem.Segment != 0 {
segment := getReg(mem.Segment)
_ = segment
fmt.Printf("%#v\n", mem)
log.Fatal(errutil.Newf("support for Mem.Segment not yet implemented"))
}
switch {
case expr.X == nil && expr.Y == nil:
fmt.Printf("%#v\n", mem)
log.Fatal(errutil.New("support for memory reference to address zero not yet implemented"))
panic("unreachable")
case expr.X == nil && expr.Y != nil:
return createPtrDeref(expr.Y)
case expr.X != nil && expr.Y == nil:
return createPtrDeref(expr.X)
default:
return createPtrDeref(expr)
}
}
// createPtrDeref returns a pointer dereference expression of addr.
func createPtrDeref(addr ast.Expr) ast.Expr {
return &ast.StarExpr{X: &ast.ParenExpr{X: addr}}
}
// createExpr converts x into a corresponding Go expression.
func createExpr(x interface{}) ast.Expr |
// fromSubReg returns an equivalent expression to x, where x may be a sub-
// register.
func fromSubReg(sub ast.Expr) ast.Expr {
// TODO: Handle sub-registers (al, ah, ax)
// TODO: Fix operator precedence for C.
// warning: & has lower precedence than <; < will be evaluated first
// cf = *((int8_t *)ebp + -1) < ebx&255;
// Handle sub-registers (e.g. al, ah, ax).
if isSubLow8(sub) {
// Before:
// al
// After:
// eax&0x000000FF
return &ast.BinaryExpr{
X: extendSubReg(sub),
Op: token.AND,
Y: createExpr(0x000000FF),
}
}
if isSubHigh8(sub) {
// Before:
// ah
// After:
// (eax&0x0000FF00)>>8
paren := &ast.ParenExpr{
X: &ast.BinaryExpr{
X: extendSubReg(sub),
Op: token.AND,
Y: createExpr(0x0000FF00),
},
}
return &ast.BinaryExpr{
X: paren,
Op: token.SHR,
Y: createExpr(8),
}
}
if isSub16(sub) {
panic("not yet implemented.")
}
return sub
}
// subLow8 maps lower 8-bit sub-registers to their parent register.
var subLow8 = map[string]string{
"al": "EAX",
"cl": "ECX",
"dl": "EDX",
"bl": "EBX",
"spb": "ESP",
"bpb": "EBP",
"sib": "ESI",
"dib": "EDI",
"r8b": "R8L",
"r9b": "R9L",
"r10b": "R10L",
"r11b": "R11L",
"r12b": "R12L",
"r13b": "R13L",
"r14b": "R14L",
"r15b": "R15L",
}
// isSubLow8 reports whether x is a lower 8-bit sub-register.
func isSubLow8(x ast.Expr) bool {
if sub, ok := x.(*ast.Ident); ok {
_, ok = subLow8[sub.Name]
return ok
}
return false
}
// subHigh8 maps higher 8-bit sub-registers to their parent register.
var subHigh8 = map[string]string{
"ah": "EAX",
"ch": "ECX",
"dh": "EDX",
"bh": "EBX",
}
// isSubHigh8 reports whether x is a higher 8-bit sub-register.
func isSubHigh8(x ast.Expr) bool {
if sub, ok := x.(*ast.Ident); ok {
_, ok = subHigh8[sub.Name]
return ok
}
return false
}
// sub16 maps 16-bit sub-registers to their parent register.
var sub16 = map[string]string{
"ax": "EAX",
"cx": "ECX",
"dx": "EDX",
"bx": "EBX",
"sp": "ESP",
"bp": "EBP",
"si": "ESI",
"di": "EDI",
"r8w": "R8L",
"r9w": "R9L",
"r10w": "R10L",
"r11w": "R11L",
"r12w": "R12L",
"r13w": "R13L",
"r14w": "R14L",
"r15w": "R15L",
}
// isSub16 reports whether x is a 16-bit sub-register.
func isSub16(x ast.Expr) bool {
if sub, ok := x.(*ast.Ident); ok {
_, ok = sub16[sub.Name]
return ok
}
return false
}
// extendSubReg returns the parent register of x if x is a sub-register.
func extendSubReg(x ast.Expr) ast.Expr {
sub, ok := x.(*ast.Ident)
if !ok {
return x
}
// Lower 8-bit sub-registers.
if reg, ok := subLow8[sub.Name]; ok {
return getRegFromString(reg)
}
// Higher 8-bit sub-registers.
if reg, ok := subHigh8[sub.Name]; ok {
return getRegFromString(reg)
}
// 16-bit sub-registers.
if reg, ok := sub16[sub.Name]; ok {
return getRegFromString(reg)
}
return x
}
| {
switch x := x.(type) {
case int:
s := strconv.FormatInt(int64(x), 10)
return &ast.BasicLit{Kind: token.INT, Value: s}
case int64:
s := strconv.FormatInt(x, 10)
return &ast.BasicLit{Kind: token.INT, Value: s}
case uint8:
s := strconv.FormatUint(uint64(x), 10)
return &ast.BasicLit{Kind: token.INT, Value: s}
}
log.Fatal(errutil.Newf("support for type %T not yet implemented", x))
panic("unreachable")
} |
span_processor.rs | //! # OpenTelemetry Span Processor Interface
//!
//! Span processor is an interface which allows hooks for span start and end method
//! invocations. The span processors are invoked only when
//! [`is_recording`] is true.
//!
//! Built-in span processors are responsible for batching and conversion of spans to
//! exportable representation and passing batches to exporters.
//!
//! Span processors can be registered directly on SDK [`TracerProvider`] and they are
//! invoked in the same order as they were registered.
//!
//! All `Tracer` instances created by a `TracerProvider` share the same span processors.
//! Changes to this collection reflect in all `Tracer` instances.
//!
//! The following diagram shows `SpanProcessor`'s relationship to other components
//! in the SDK:
//!
//! ```ascii
//! +-----+--------------+ +-----------------------+ +-------------------+
//! | | | | | | |
//! | | | | (Batch)SpanProcessor | | SpanExporter |
//! | | +---> (Simple)SpanProcessor +---> (JaegerExporter) |
//! | | | | | | |
//! | SDK | Tracer.span()| +-----------------------+ +-------------------+
//! | | Span.end() |
//! | | | +---------------------+
//! | | | | |
//! | | +---> ZPagesProcessor |
//! | | | | |
//! +-----+--------------+ +---------------------+
//! ```
//!
//! [`is_recording`]: crate::trace::Span::is_recording()
//! [`TracerProvider`]: crate::trace::TracerProvider
use crate::global;
use crate::runtime::Runtime;
use crate::sdk::trace::Span;
use crate::{
sdk::export::trace::{ExportResult, SpanData, SpanExporter},
trace::{TraceError, TraceResult},
Context,
};
use futures::{channel::mpsc, channel::oneshot, executor, future::Either, pin_mut, StreamExt};
use std::env;
use std::{fmt, str::FromStr, sync::Mutex, time::Duration};
/// Delay interval between two consecutive exports.
const OTEL_BSP_SCHEDULE_DELAY: &str = "OTEL_BSP_SCHEDULE_DELAY";
/// Default delay interval between two consecutive exports.
const OTEL_BSP_SCHEDULE_DELAY_DEFAULT: u64 = 5_000;
/// Maximum queue size
const OTEL_BSP_MAX_QUEUE_SIZE: &str = "OTEL_BSP_MAX_QUEUE_SIZE";
/// Default maximum queue size
const OTEL_BSP_MAX_QUEUE_SIZE_DEFAULT: usize = 2_048;
/// Maximum batch size, must be less than or equal to OTEL_BSP_MAX_QUEUE_SIZE
const OTEL_BSP_MAX_EXPORT_BATCH_SIZE: &str = "OTEL_BSP_MAX_EXPORT_BATCH_SIZE";
/// Default maximum batch size
const OTEL_BSP_MAX_EXPORT_BATCH_SIZE_DEFAULT: usize = 512;
/// Maximum allowed time to export data.
const OTEL_BSP_EXPORT_TIMEOUT: &str = "OTEL_BSP_EXPORT_TIMEOUT";
/// Default maximum allowed time to export data.
const OTEL_BSP_EXPORT_TIMEOUT_DEFAULT: u64 = 30_000;
/// `SpanProcessor` is an interface which allows hooks for span start and end
/// method invocations. The span processors are invoked only when is_recording
/// is true.
pub trait SpanProcessor: Send + Sync + std::fmt::Debug {
/// `on_start` is called when a `Span` is started. This method is called
/// synchronously on the thread that started the span, therefore it should
/// not block or throw exceptions.
fn on_start(&self, span: &Span, cx: &Context);
/// `on_end` is called after a `Span` is ended (i.e., the end timestamp is
/// already set). This method is called synchronously within the `Span::end`
/// API, therefore it should not block or throw an exception.
fn on_end(&self, span: SpanData);
/// Force the spans lying in the cache to be exported.
fn force_flush(&self) -> TraceResult<()>;
/// Shuts down the processor. Called when SDK is shut down. This is an
/// opportunity for processors to do any cleanup required.
fn shutdown(&mut self) -> TraceResult<()>;
}
/// A [`SpanProcessor`] that exports synchronously when spans are finished.
///
/// # Examples
///
/// Note that the simple processor exports synchronously every time a span is
/// ended. If you find this limiting, consider the batch processor instead.
///
/// ```
/// use opentelemetry::{trace as apitrace, sdk::trace as sdktrace, global};
///
/// // Configure your preferred exporter
/// let exporter = apitrace::NoopSpanExporter::new();
///
/// // Then use the `with_simple_exporter` method to have the provider export when spans finish.
/// let provider = sdktrace::TracerProvider::builder()
/// .with_simple_exporter(exporter)
/// .build();
///
/// let previous_provider = global::set_tracer_provider(provider);
/// ```
#[derive(Debug)]
pub struct SimpleSpanProcessor {
exporter: Mutex<Box<dyn SpanExporter>>,
}
impl SimpleSpanProcessor {
pub(crate) fn new(exporter: Box<dyn SpanExporter>) -> Self {
SimpleSpanProcessor {
exporter: Mutex::new(exporter),
}
}
}
impl SpanProcessor for SimpleSpanProcessor {
fn on_start(&self, _span: &Span, _cx: &Context) {
// Ignored
}
fn on_end(&self, span: SpanData) {
let result = self
.exporter
.lock()
.map_err(|_| TraceError::Other("simple span processor mutex poisoned".into()))
.and_then(|mut exporter| executor::block_on(exporter.export(vec![span])));
if let Err(err) = result {
global::handle_error(err);
}
}
fn force_flush(&self) -> TraceResult<()> {
// Ignored since all spans in Simple Processor will be exported as they ended.
Ok(())
}
fn shutdown(&mut self) -> TraceResult<()> {
if let Ok(mut exporter) = self.exporter.lock() | else {
Err(TraceError::Other(
"When shutting down the SimpleSpanProcessor, the exporter's lock has been poisoned"
.into(),
))
}
}
}
/// A [`SpanProcessor`] that asynchronously buffers finished spans and reports
/// them at a preconfigured interval.
///
/// # Examples
///
/// This processor can be configured with an [`executor`] of your choice to
/// batch and upload spans asynchronously when they end. If you have added a
/// library like [`tokio`] or [`async-std`], you can pass in their respective
/// `spawn` and `interval` functions to have batching performed in those
/// contexts.
///
/// ```
/// # #[cfg(feature="tokio")]
/// # {
/// use futures::{stream};
/// use opentelemetry::{trace as apitrace, sdk::trace as sdktrace, global, runtime};
/// use std::time::Duration;
///
/// #[tokio::main]
/// async fn main() {
/// // Configure your preferred exporter
/// let exporter = apitrace::NoopSpanExporter::new();
///
/// // Then build a batch processor. You can use whichever executor you have available, for
/// // example if you are using `async-std` instead of `tokio` you can replace the spawn and
/// // interval functions with `async_std::task::spawn` and `async_std::stream::interval`.
/// let batch = sdktrace::BatchSpanProcessor::builder(exporter, runtime::Tokio)
/// .with_max_queue_size(4096)
/// .build();
///
/// // Then use the `with_batch_exporter` method to have the provider export spans in batches.
/// let provider = sdktrace::TracerProvider::builder()
/// .with_batch_exporter(batch)
/// .build();
///
/// let guard = global::set_tracer_provider(provider);
/// # drop(guard)
/// }
/// # }
/// ```
///
/// [`executor`]: https://docs.rs/futures/0.3/futures/executor/index.html
/// [`tokio`]: https://tokio.rs
/// [`async-std`]: https://async.rs
pub struct BatchSpanProcessor {
message_sender: Mutex<mpsc::Sender<BatchMessage>>,
}
impl fmt::Debug for BatchSpanProcessor {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("BatchSpanProcessor")
.field("message_sender", &self.message_sender)
.finish()
}
}
impl SpanProcessor for BatchSpanProcessor {
fn on_start(&self, _span: &Span, _cx: &Context) {
// Ignored
}
fn on_end(&self, span: SpanData) {
let result = self
.message_sender
.lock()
.map_err(|_| TraceError::Other("batch span processor mutex poisoned".into()))
.and_then(|mut sender| {
sender
.try_send(BatchMessage::ExportSpan(span))
.map_err(|err| TraceError::Other(err.into()))
});
if let Err(err) = result {
global::handle_error(err);
}
}
fn force_flush(&self) -> TraceResult<()> {
let mut sender = self.message_sender.lock().map_err(|_| TraceError::from("When force flushing the BatchSpanProcessor, the message sender's lock has been poisoned"))?;
let (res_sender, res_receiver) = oneshot::channel::<Vec<ExportResult>>();
sender.try_send(BatchMessage::Flush(Some(res_sender)))?;
for result in futures::executor::block_on(res_receiver)? {
result?;
}
Ok(())
}
fn shutdown(&mut self) -> TraceResult<()> {
let mut sender = self.message_sender.lock().map_err(|_| TraceError::from("When shutting down the BatchSpanProcessor, the message sender's lock has been poisoned"))?;
let (res_sender, res_receiver) = oneshot::channel::<Vec<ExportResult>>();
sender.try_send(BatchMessage::Shutdown(res_sender))?;
for result in futures::executor::block_on(res_receiver)? {
result?;
}
Ok(())
}
}
#[derive(Debug)]
enum BatchMessage {
ExportSpan(SpanData),
Flush(Option<oneshot::Sender<Vec<ExportResult>>>),
Shutdown(oneshot::Sender<Vec<ExportResult>>),
}
impl BatchSpanProcessor {
pub(crate) fn new<R>(
mut exporter: Box<dyn SpanExporter>,
config: BatchConfig,
runtime: R,
) -> Self
where
R: Runtime + Clone + Send + Sync + 'static,
{
let (message_sender, message_receiver) = mpsc::channel(config.max_queue_size);
let ticker = runtime
.interval(config.scheduled_delay)
.map(|_| BatchMessage::Flush(None));
let timeout_runtime = runtime.clone();
// Spawn worker process via user-defined spawn function.
runtime.spawn(Box::pin(async move {
let mut spans = Vec::new();
let mut messages = Box::pin(futures::stream::select(message_receiver, ticker));
while let Some(message) = messages.next().await {
match message {
// Span has finished, add to buffer of pending spans.
BatchMessage::ExportSpan(span) => {
if spans.len() < config.max_queue_size {
spans.push(span);
}
}
// Span batch interval time reached or a force flush has been invoked, export current spans.
BatchMessage::Flush(Some(ch)) => {
let mut results =
Vec::with_capacity(spans.len() / config.max_export_batch_size + 1);
while !spans.is_empty() {
let batch = spans.split_off(
spans.len().saturating_sub(config.max_export_batch_size),
);
results.push(
export_with_timeout(
config.max_export_timeout,
exporter.as_mut(),
&timeout_runtime,
batch,
).await
);
}
let send_result = ch.send(results);
if send_result.is_err() {
global::handle_error(TraceError::from("fail to send the export response from worker handle in BatchProcessor"))
}
}
BatchMessage::Flush(None) => {
while !spans.is_empty() {
let batch = spans.split_off(
spans.len().saturating_sub(config.max_export_batch_size),
);
let result = export_with_timeout(
config.max_export_timeout,
exporter.as_mut(),
&timeout_runtime,
batch,
).await;
if let Err(err) = result {
global::handle_error(err);
}
}
}
// Stream has terminated or processor is shutdown, return to finish execution.
BatchMessage::Shutdown(ch) => {
let mut results =
Vec::with_capacity(spans.len() / config.max_export_batch_size + 1);
while !spans.is_empty() {
let batch = spans.split_off(
spans.len().saturating_sub(config.max_export_batch_size),
);
results.push(
export_with_timeout(
config.max_export_timeout,
exporter.as_mut(),
&timeout_runtime,
batch,
).await
);
}
exporter.shutdown();
let send_result = ch.send(results);
if send_result.is_err() {
global::handle_error(TraceError::from("fail to send the export response from worker handle in BatchProcessor"))
}
break;
}
}
}
}));
// Return batch processor with link to worker
BatchSpanProcessor {
message_sender: Mutex::new(message_sender),
}
}
/// Create a new batch processor builder
pub fn builder<E, R>(exporter: E, runtime: R) -> BatchSpanProcessorBuilder<E, R>
where
E: SpanExporter,
R: Runtime,
{
BatchSpanProcessorBuilder {
exporter,
config: BatchConfig::default(),
runtime,
}
}
}
async fn export_with_timeout<R, E>(
time_out: Duration,
exporter: &mut E,
runtime: &R,
batch: Vec<SpanData>,
) -> ExportResult
where
R: Runtime,
E: SpanExporter + ?Sized,
{
let export = exporter.export(batch);
let timeout = runtime.delay(time_out);
pin_mut!(export);
pin_mut!(timeout);
match futures::future::select(export, timeout).await {
Either::Left((export_res, _)) => export_res,
Either::Right((_, _)) => ExportResult::Err(TraceError::ExportTimedOut(time_out)),
}
}
/// Batch span processor configuration
#[derive(Debug)]
pub struct BatchConfig {
/// The maximum queue size to buffer spans for delayed processing. If the
/// queue gets full it drops the spans. The default value of is 2048.
max_queue_size: usize,
/// The delay interval in milliseconds between two consecutive processing
/// of batches. The default value is 5 seconds.
scheduled_delay: Duration,
/// The maximum number of spans to process in a single batch. If there are
/// more than one batch worth of spans then it processes multiple batches
/// of spans one batch after the other without any delay. The default value
/// is 512.
max_export_batch_size: usize,
/// The maximum duration to export a batch of data.
max_export_timeout: Duration,
}
impl Default for BatchConfig {
fn default() -> Self {
let mut config = BatchConfig {
max_queue_size: OTEL_BSP_MAX_QUEUE_SIZE_DEFAULT,
scheduled_delay: Duration::from_millis(OTEL_BSP_SCHEDULE_DELAY_DEFAULT),
max_export_batch_size: OTEL_BSP_MAX_EXPORT_BATCH_SIZE_DEFAULT,
max_export_timeout: Duration::from_millis(OTEL_BSP_EXPORT_TIMEOUT_DEFAULT),
};
if let Some(max_queue_size) = env::var(OTEL_BSP_MAX_QUEUE_SIZE)
.ok()
.and_then(|queue_size| usize::from_str(&queue_size).ok())
{
config.max_queue_size = max_queue_size;
}
if let Some(scheduled_delay) = env::var(OTEL_BSP_SCHEDULE_DELAY)
.ok()
.or_else(|| env::var("OTEL_BSP_SCHEDULE_DELAY_MILLIS").ok())
.and_then(|delay| u64::from_str(&delay).ok())
{
config.scheduled_delay = Duration::from_millis(scheduled_delay);
}
if let Some(max_export_batch_size) = env::var(OTEL_BSP_MAX_EXPORT_BATCH_SIZE)
.ok()
.and_then(|batch_size| usize::from_str(&batch_size).ok())
{
config.max_export_batch_size = max_export_batch_size;
}
// max export batch size must be less or equal to max queue size.
// we set max export batch size to max queue size if it's larger than max queue size.
if config.max_export_batch_size > config.max_queue_size {
config.max_export_batch_size = config.max_queue_size;
}
if let Some(max_export_timeout) = env::var(OTEL_BSP_EXPORT_TIMEOUT)
.ok()
.or_else(|| env::var("OTEL_BSP_EXPORT_TIMEOUT_MILLIS").ok())
.and_then(|timeout| u64::from_str(&timeout).ok())
{
config.max_export_timeout = Duration::from_millis(max_export_timeout);
}
config
}
}
/// A builder for creating [`BatchSpanProcessor`] instances.
///
#[derive(Debug)]
pub struct BatchSpanProcessorBuilder<E, R> {
exporter: E,
config: BatchConfig,
runtime: R,
}
impl<E, R> BatchSpanProcessorBuilder<E, R>
where
E: SpanExporter + 'static,
R: Runtime + Clone + Send + Sync + 'static,
{
/// Set max queue size for batches
pub fn with_max_queue_size(self, size: usize) -> Self {
let mut config = self.config;
config.max_queue_size = size;
BatchSpanProcessorBuilder { config, ..self }
}
/// Set scheduled delay for batches
pub fn with_scheduled_delay(self, delay: Duration) -> Self {
let mut config = self.config;
config.scheduled_delay = delay;
BatchSpanProcessorBuilder { config, ..self }
}
/// Set max timeout for exporting.
pub fn with_max_timeout(self, timeout: Duration) -> Self {
let mut config = self.config;
config.max_export_timeout = timeout;
BatchSpanProcessorBuilder { config, ..self }
}
/// Set max export size for batches, should always less than or equals to max queue size.
///
/// If input is larger than max queue size, will lower it to be equal to max queue size
pub fn with_max_export_batch_size(self, size: usize) -> Self {
let mut config = self.config;
if size > config.max_queue_size {
config.max_export_batch_size = config.max_queue_size;
} else {
config.max_export_batch_size = size;
}
BatchSpanProcessorBuilder { config, ..self }
}
/// Build a batch processor
pub fn build(self) -> BatchSpanProcessor {
BatchSpanProcessor::new(Box::new(self.exporter), self.config, self.runtime)
}
}
#[cfg(all(test, feature = "testing", feature = "trace"))]
mod tests {
use super::{
BatchSpanProcessor, SimpleSpanProcessor, SpanProcessor, OTEL_BSP_EXPORT_TIMEOUT,
OTEL_BSP_MAX_EXPORT_BATCH_SIZE, OTEL_BSP_MAX_QUEUE_SIZE, OTEL_BSP_MAX_QUEUE_SIZE_DEFAULT,
OTEL_BSP_SCHEDULE_DELAY, OTEL_BSP_SCHEDULE_DELAY_DEFAULT,
};
use crate::runtime;
use crate::sdk::export::trace::{stdout, ExportResult, SpanData, SpanExporter};
use crate::sdk::trace::BatchConfig;
use crate::testing::trace::{
new_test_export_span_data, new_test_exporter, new_tokio_test_exporter,
};
use async_trait::async_trait;
use futures::Future;
use std::fmt::Debug;
use std::time::Duration;
#[test]
fn simple_span_processor_on_end_calls_export() {
let (exporter, rx_export, _rx_shutdown) = new_test_exporter();
let processor = SimpleSpanProcessor::new(Box::new(exporter));
processor.on_end(new_test_export_span_data());
assert!(rx_export.try_recv().is_ok());
}
#[test]
fn simple_span_processor_shutdown_calls_shutdown() {
let (exporter, _rx_export, rx_shutdown) = new_test_exporter();
let mut processor = SimpleSpanProcessor::new(Box::new(exporter));
let _result = processor.shutdown();
assert!(rx_shutdown.try_recv().is_ok());
}
#[test]
fn test_build_batch_span_processor_builder() {
std::env::set_var(OTEL_BSP_MAX_EXPORT_BATCH_SIZE, "500");
std::env::set_var(OTEL_BSP_EXPORT_TIMEOUT, "2046");
std::env::set_var(OTEL_BSP_SCHEDULE_DELAY, "I am not number");
let mut builder = BatchSpanProcessor::builder(
stdout::Exporter::new(std::io::stdout(), true),
runtime::Tokio,
);
// export batch size cannot exceed max queue size
assert_eq!(builder.config.max_export_batch_size, 500);
assert_eq!(
builder.config.scheduled_delay,
Duration::from_millis(OTEL_BSP_SCHEDULE_DELAY_DEFAULT)
);
assert_eq!(
builder.config.max_queue_size,
OTEL_BSP_MAX_QUEUE_SIZE_DEFAULT
);
assert_eq!(
builder.config.max_export_timeout,
Duration::from_millis(2046)
);
std::env::set_var(OTEL_BSP_MAX_QUEUE_SIZE, "120");
builder = BatchSpanProcessor::builder(
stdout::Exporter::new(std::io::stdout(), true),
runtime::Tokio,
);
assert_eq!(builder.config.max_export_batch_size, 120);
assert_eq!(builder.config.max_queue_size, 120);
}
#[tokio::test]
async fn test_batch_span_processor() {
let (exporter, mut export_receiver, _shutdown_receiver) = new_tokio_test_exporter();
let config = BatchConfig {
scheduled_delay: Duration::from_secs(60 * 60 * 24), // set the tick to 24 hours so we know the span must be exported via force_flush
..Default::default()
};
let mut processor =
BatchSpanProcessor::new(Box::new(exporter), config, runtime::TokioCurrentThread);
let handle = tokio::spawn(async move {
loop {
if let Some(span) = export_receiver.recv().await {
assert_eq!(span.span_context, new_test_export_span_data().span_context);
break;
}
}
});
tokio::time::sleep(Duration::from_secs(1)).await; // skip the first
processor.on_end(new_test_export_span_data());
let flush_res = processor.force_flush();
assert!(flush_res.is_ok());
let _shutdown_result = processor.shutdown();
assert!(
tokio::time::timeout(Duration::from_secs(5), handle)
.await
.is_ok(),
"timed out in 5 seconds. force_flush may not export any data when called"
);
}
struct BlockingExporter<D> {
delay_for: Duration,
delay_fn: D,
}
impl<D, DS> Debug for BlockingExporter<D>
where
D: Fn(Duration) -> DS + 'static + Send + Sync,
DS: Future<Output = ()> + Send + Sync + 'static,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("blocking exporter for testing")
}
}
#[async_trait]
impl<D, DS> SpanExporter for BlockingExporter<D>
where
D: Fn(Duration) -> DS + 'static + Send + Sync,
DS: Future<Output = ()> + Send + Sync + 'static,
{
async fn export(&mut self, _batch: Vec<SpanData>) -> ExportResult {
(self.delay_fn)(self.delay_for).await;
Ok(())
}
}
#[test]
fn test_timeout_tokio_timeout() {
// If time_out is true, then we ask exporter to block for 60s and set timeout to 5s.
// If time_out is false, then we ask the exporter to block for 5s and set timeout to 60s.
// Either way, the test should be finished within 5s.
let runtime = tokio::runtime::Builder::new_multi_thread()
.enable_all()
.build()
.unwrap();
runtime.block_on(timeout_test_tokio(true));
}
#[test]
fn test_timeout_tokio_not_timeout() {
let runtime = tokio::runtime::Builder::new_multi_thread()
.enable_all()
.build()
.unwrap();
runtime.block_on(timeout_test_tokio(false));
}
#[test]
#[cfg(feature = "rt-async-std")]
fn test_timeout_async_std_timeout() {
async_std::task::block_on(timeout_test_std_async(true));
}
#[test]
#[cfg(feature = "rt-async-std")]
fn test_timeout_async_std_not_timeout() {
async_std::task::block_on(timeout_test_std_async(false));
}
// If the time_out is true, then the result suppose to ended with timeout.
// otherwise the exporter should be able to export within time out duration.
#[cfg(feature = "rt-async-std")]
async fn timeout_test_std_async(time_out: bool) {
let config = BatchConfig {
max_export_timeout: Duration::from_millis(if time_out { 5 } else { 60 }),
scheduled_delay: Duration::from_secs(60 * 60 * 24), // set the tick to 24 hours so we know the span must be exported via force_flush
..Default::default()
};
let exporter = BlockingExporter {
delay_for: Duration::from_millis(if !time_out { 5 } else { 60 }),
delay_fn: async_std::task::sleep,
};
let mut processor = BatchSpanProcessor::new(Box::new(exporter), config, runtime::AsyncStd);
processor.on_end(new_test_export_span_data());
let flush_res = processor.force_flush();
if time_out {
assert!(flush_res.is_err());
} else {
assert!(flush_res.is_ok());
}
let shutdown_res = processor.shutdown();
assert!(shutdown_res.is_ok());
}
// If the time_out is true, then the result suppose to ended with timeout.
// otherwise the exporter should be able to export within time out duration.
async fn timeout_test_tokio(time_out: bool) {
let config = BatchConfig {
max_export_timeout: Duration::from_millis(if time_out { 5 } else { 60 }),
scheduled_delay: Duration::from_secs(60 * 60 * 24), // set the tick to 24 hours so we know the span must be exported via force_flush,
..Default::default()
};
let exporter = BlockingExporter {
delay_for: Duration::from_millis(if !time_out { 5 } else { 60 }),
delay_fn: tokio::time::sleep,
};
let mut processor =
BatchSpanProcessor::new(Box::new(exporter), config, runtime::TokioCurrentThread);
tokio::time::sleep(Duration::from_secs(1)).await; // skip the first
processor.on_end(new_test_export_span_data());
let flush_res = processor.force_flush();
if time_out {
assert!(flush_res.is_err());
} else {
assert!(flush_res.is_ok());
}
let shutdown_res = processor.shutdown();
assert!(shutdown_res.is_ok());
}
}
| {
exporter.shutdown();
Ok(())
} |
build.rs | use cmake::Config;
use std::env;
fn harfbuzz() |
fn main() {
harfbuzz();
let out_dir = env::var("OUT_DIR").unwrap();
println!("cargo:outdir={}", out_dir);
}
| {
let mut config = Config::new("harfbuzz");
for (key, value) in std::env::vars() {
println!("{}: {}", key, value);
}
let ft_outdir = std::env::var("DEP_FREETYPE_OUTDIR").unwrap();
let dst = config
.env("CMAKE_PREFIX_PATH", &ft_outdir)
.cxxflag("-DHB_NO_PRAGMA_GCC_DIAGNOSTIC_ERROR")
.define("HB_HAVE_FREETYPE", "ON")
.define("HB_BUILD_TESTS", "OFF")
.define(
"FREETYPE_LIBRARY",
std::env::var("DEP_FREETYPE_LIB").unwrap(),
)
.define(
"FREETYPE_INCLUDE_DIR_ft2build",
std::env::var("DEP_FREETYPE_INCLUDE").unwrap(),
)
.define(
"FREETYPE_INCLUDE_DIR_freetype2",
std::env::var("DEP_FREETYPE_INCLUDE").unwrap(),
)
.profile("Release")
.build();
println!("cargo:rustc-link-search=native={}/lib", ft_outdir);
println!("cargo:rustc-link-search=native={}/lib", dst.display());
println!("cargo:rustc-link-lib=static=harfbuzz");
println!("cargo:rustc-link-search=native=/usr/lib");
} |
Cart.js | import React, { useEffect, useState } from 'react';
import { Link } from 'react-router-dom';
import { useDispatch, useSelector } from 'react-redux';
import { Row, Col, ListGroup, Image, Form, Button, Card, Container, Modal } from 'react-bootstrap';
import ErrorMessage from '../components/ErrorMessage';
import Loading from '../components/Loading';
import { removeFromCart, getCart } from '../actions/userReducerActions';
import Snackbar from '../components/Snackbar';
import { createOrderFromCart } from '../actions/orderReducerActions';
import Meta from '../components/Meta';
const Cart = ({ history }) => {
const [open, setOpen] = useState(false);
const [placeOrderPrompt, setPlaceOrderPrompt] = useState(false);
const dispatch = useDispatch();
const userInfo = useSelector((state) => state.userInfo);
const { loading, isLogged, error, removeFromCartSuccess, user } = userInfo;
const ordersInfo = useSelector((state) => state.ordersInfo);
const { loading: orderLoading, created, error: orderError, createdOrder } = ordersInfo;
useEffect(() => {
if (open) {
setTimeout(() => setOpen(false), 3000);
}
}, [open]);
useEffect(() => {
if (removeFromCartSuccess) {
setOpen(true);
dispatch({ type: 'USER_CART_RESET' });
}
}, [dispatch, removeFromCartSuccess]);
useEffect(() => {
if (!isLogged) {
history.push('/');
} else {
dispatch(getCart());
}
}, [dispatch, history, isLogged]);
useEffect(() => {
if (createdOrder) {
history.push(`orders/${createdOrder.orderID}`);
}
}, [history, createdOrder]);
const removeTicketFromCart = (eventID) => {
dispatch(removeFromCart(eventID));
};
const placeOrderHandler = () => {
setPlaceOrderPrompt(true);
};
const proceedToCheckoutHandler = () => {
setPlaceOrderPrompt(false);
dispatch(createOrderFromCart());
};
return (
<>
<Meta title='Eventify | My Cart' />
<Container>
{orderLoading ? (
<Loading />
) : orderError ? (
<ErrorMessage variant='danger'>{orderError}</ErrorMessage>
) : (
<>
{error && <ErrorMessage variant='danger'>{error}</ErrorMessage>}
{open && <Snackbar open={open} variant='success' message='Removed Item From Cart' />}
{placeOrderPrompt && (
<Modal show={placeOrderPrompt}>
<Modal.Header>
<Modal.Title>Attention!</Modal.Title>
</Modal.Header>
<Modal.Body>
Placing an order will clear your cart contents and you'll be redirected to
Checkout to finish your payment process, If you wish to add or remove your
cart contents, you may close this window and do so now.
</Modal.Body>
<Modal.Footer>
<Button variant='secondary' onClick={() => setPlaceOrderPrompt(false)}>
Close
</Button>
<Button variant='primary' onClick={proceedToCheckoutHandler}>
Proceed To Checkout
</Button>
</Modal.Footer>
</Modal>
)}
<Row>
{loading ? (
<Loading />
) : (
<>
<Col md={8} className='mb-3'>
<h1>Ticket Cart</h1>
{isLogged && user.cart.length === 0 ? (
<ErrorMessage variant='info'>
Your cart is empty <Link to='/'>Go Home</Link>
</ErrorMessage>
) : (
isLogged && (
<>
<ListGroup variant='flush'>
{user.cart.map((ticket) => (
<ListGroup.Item key={ticket._id}>
<Row>
<Col xs={6} md={6}>
<Link
to={`/event/details/${ticket.eventID}`}
>
{ticket.name}
</Link>
</Col>
<Col xs={4} md={4}> | <Col xs={2} md={1}>
<Button
type='button'
variant='light'
onClick={() =>
removeTicketFromCart(
ticket.eventID
)
}
>
<i className='fas fa-trash'></i>
</Button>
</Col>
</Row>
</ListGroup.Item>
))}
</ListGroup>
</>
)
)}
</Col>
{isLogged && (
<Col md={4}>
<ListGroup variant='flush'>
<ListGroup.Item>
<h2>
Subtotal (
{user.cart.reduce((acc, ticket) => acc + 1, 0)})
tickets
</h2>
</ListGroup.Item>
<ListGroup.Item>
<Row>
<Col xs={9} md={9}>
Items Total:
</Col>
<Col>
$
{user.cart
.reduce(
(acc, ticket) => acc + ticket.ticketPrice,
0
)
.toFixed(2)}
</Col>
</Row>
</ListGroup.Item>
<ListGroup.Item>
<Row>
<Col xs={9} md={9}>
Fees:
</Col>
<Col>
$
{user.cart
.reduce(
(acc, ticket) =>
acc + 0.05 * ticket.ticketPrice,
0
)
.toFixed(2)}
</Col>
</Row>
</ListGroup.Item>
<ListGroup.Item>
<Row>
<Col xs={9} md={9}>
Total:
</Col>
<Col>
$
{user.cart
.reduce(
(acc, ticket) =>
acc + 1.05 * ticket.ticketPrice,
0
)
.toFixed(2)}
</Col>
</Row>
</ListGroup.Item>
<ListGroup.Item>
<Button
type='button'
className='btn-block'
disabled={user.cart.length === 0 || loading}
onClick={placeOrderHandler}
>
Place Order
</Button>
</ListGroup.Item>
</ListGroup>
</Col>
)}
</>
)}
</Row>
</>
)}
</Container>
</>
);
};
export default Cart; | ${ticket.ticketPrice}
</Col>
|
dnn_linear_combined.py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TensorFlow estimators for Linear and DNN joined training models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import six
from tensorflow.python.estimator import estimator
from tensorflow.python.estimator.canned import dnn
from tensorflow.python.estimator.canned import head as head_lib
from tensorflow.python.estimator.canned import linear
from tensorflow.python.estimator.canned import optimizers
from tensorflow.python.feature_column import feature_column_v2
from tensorflow.python.framework import ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops.losses import losses
from tensorflow.python.summary import summary
from tensorflow.python.training import sync_replicas_optimizer
from tensorflow.python.training import training_util
from tensorflow.python.util.tf_export import estimator_export
# The default learning rates are a historical artifact of the initial
# implementation.
_DNN_LEARNING_RATE = 0.001
_LINEAR_LEARNING_RATE = 0.005
def _check_no_sync_replicas_optimizer(optimizer):
if isinstance(optimizer, sync_replicas_optimizer.SyncReplicasOptimizer):
raise ValueError(
'SyncReplicasOptimizer does not support multi optimizers case. '
'Therefore, it is not supported in DNNLinearCombined model. '
'If you want to use this optimizer, please use either DNN or Linear '
'model.')
def _linear_learning_rate(num_linear_feature_columns):
"""Returns the default learning rate of the linear model.
The calculation is a historical artifact of this initial implementation, but
has proven a reasonable choice.
Args:
num_linear_feature_columns: The number of feature columns of the linear
model.
Returns:
A float.
"""
default_learning_rate = 1. / math.sqrt(num_linear_feature_columns)
return min(_LINEAR_LEARNING_RATE, default_learning_rate)
def _add_layer_summary(value, tag):
summary.scalar('%s/fraction_of_zero_values' % tag, nn.zero_fraction(value))
summary.histogram('%s/activation' % tag, value)
def _dnn_linear_combined_model_fn(features,
labels,
mode,
head,
linear_feature_columns=None,
linear_optimizer='Ftrl',
dnn_feature_columns=None,
dnn_optimizer='Adagrad',
dnn_hidden_units=None,
dnn_activation_fn=nn.relu,
dnn_dropout=None,
input_layer_partitioner=None,
config=None,
batch_norm=False,
linear_sparse_combiner='sum'):
"""Deep Neural Net and Linear combined model_fn.
Args:
features: dict of `Tensor`.
labels: `Tensor` of shape [batch_size, 1] or [batch_size] labels of dtype
`int32` or `int64` in the range `[0, n_classes)`.
mode: Defines whether this is training, evaluation or prediction.
See `ModeKeys`.
head: A `Head` instance.
linear_feature_columns: An iterable containing all the feature columns used
by the Linear model.
linear_optimizer: string, `Optimizer` object, or callable that defines the
optimizer to use for training the Linear model. Defaults to the Ftrl
optimizer.
dnn_feature_columns: An iterable containing all the feature columns used by
the DNN model.
dnn_optimizer: string, `Optimizer` object, or callable that defines the
optimizer to use for training the DNN model. Defaults to the Adagrad
optimizer.
dnn_hidden_units: List of hidden units per DNN layer.
dnn_activation_fn: Activation function applied to each DNN layer. If `None`,
will use `tf.nn.relu`.
dnn_dropout: When not `None`, the probability we will drop out a given DNN
coordinate.
input_layer_partitioner: Partitioner for input layer.
config: `RunConfig` object to configure the runtime settings.
batch_norm: Whether to use batch normalization after each hidden layer.
linear_sparse_combiner: A string specifying how to reduce the linear model
if a categorical column is multivalent. One of "mean", "sqrtn", and
"sum".
Returns:
An `EstimatorSpec` instance.
Raises:
ValueError: If both `linear_feature_columns` and `dnn_features_columns`
are empty at the same time, or `input_layer_partitioner` is missing,
or features has the wrong type.
"""
if not isinstance(features, dict):
raise ValueError('features should be a dictionary of `Tensor`s. '
'Given type: {}'.format(type(features)))
if not linear_feature_columns and not dnn_feature_columns:
raise ValueError(
'Either linear_feature_columns or dnn_feature_columns must be defined.')
num_ps_replicas = config.num_ps_replicas if config else 0
input_layer_partitioner = input_layer_partitioner or (
partitioned_variables.min_max_variable_partitioner(
max_partitions=num_ps_replicas,
min_slice_size=64 << 20))
shared_state_manager = feature_column_v2.maybe_create_shared_state_manager(
list(linear_feature_columns) + list(dnn_feature_columns))
# Build DNN Logits.
dnn_parent_scope = 'dnn'
if not dnn_feature_columns:
dnn_logits = None
else:
dnn_optimizer = optimizers.get_optimizer_instance(
dnn_optimizer, learning_rate=_DNN_LEARNING_RATE)
_check_no_sync_replicas_optimizer(dnn_optimizer)
if not dnn_hidden_units:
raise ValueError(
'dnn_hidden_units must be defined when dnn_feature_columns is '
'specified.')
dnn_partitioner = (
partitioned_variables.min_max_variable_partitioner(
max_partitions=num_ps_replicas))
with variable_scope.variable_scope(
dnn_parent_scope,
values=tuple(six.itervalues(features)),
partitioner=dnn_partitioner) as scope:
dnn_absolute_scope = scope.name
dnn_logit_fn = dnn._dnn_logit_fn_builder( # pylint: disable=protected-access
units=head.logits_dimension,
hidden_units=dnn_hidden_units,
feature_columns=dnn_feature_columns,
activation_fn=dnn_activation_fn,
dropout=dnn_dropout,
batch_norm=batch_norm,
input_layer_partitioner=input_layer_partitioner,
shared_state_manager=shared_state_manager)
dnn_logits = dnn_logit_fn(features=features, mode=mode)
linear_parent_scope = 'linear'
if not linear_feature_columns:
linear_logits = None
else:
linear_optimizer = optimizers.get_optimizer_instance(
linear_optimizer,
learning_rate=_linear_learning_rate(len(linear_feature_columns)))
_check_no_sync_replicas_optimizer(linear_optimizer)
with variable_scope.variable_scope(
linear_parent_scope,
values=tuple(six.itervalues(features)),
partitioner=input_layer_partitioner) as scope:
linear_absolute_scope = scope.name
logit_fn = linear._linear_logit_fn_builder( # pylint: disable=protected-access
units=head.logits_dimension,
feature_columns=linear_feature_columns,
sparse_combiner=linear_sparse_combiner)
linear_logits = logit_fn(features=features)
_add_layer_summary(linear_logits, scope.name)
# Combine logits and build full model.
if dnn_logits is not None and linear_logits is not None:
logits = dnn_logits + linear_logits
elif dnn_logits is not None:
logits = dnn_logits
else:
logits = linear_logits
def _train_op_fn(loss):
"""Returns the op to optimize the loss."""
train_ops = []
global_step = training_util.get_global_step()
if dnn_logits is not None:
train_ops.append(
dnn_optimizer.minimize(
loss,
var_list=ops.get_collection(
ops.GraphKeys.TRAINABLE_VARIABLES,
scope=dnn_absolute_scope)))
if linear_logits is not None:
train_ops.append(
linear_optimizer.minimize(
loss,
var_list=ops.get_collection(
ops.GraphKeys.TRAINABLE_VARIABLES,
scope=linear_absolute_scope)))
train_op = control_flow_ops.group(*train_ops)
with ops.control_dependencies([train_op]):
return state_ops.assign_add(global_step, 1).op
return head.create_estimator_spec(
features=features,
mode=mode,
labels=labels,
train_op_fn=_train_op_fn,
logits=logits)
@estimator_export('estimator.DNNLinearCombinedClassifier')
class DNNLinearCombinedClassifier(estimator.Estimator):
"""An estimator for TensorFlow Linear and DNN joined classification models.
Note: This estimator is also known as wide-n-deep.
Example:
```python
numeric_feature = numeric_column(...)
categorical_column_a = categorical_column_with_hash_bucket(...)
categorical_column_b = categorical_column_with_hash_bucket(...)
categorical_feature_a_x_categorical_feature_b = crossed_column(...)
categorical_feature_a_emb = embedding_column(
categorical_column=categorical_feature_a, ...)
categorical_feature_b_emb = embedding_column(
categorical_id_column=categorical_feature_b, ...)
estimator = DNNLinearCombinedClassifier(
# wide settings
linear_feature_columns=[categorical_feature_a_x_categorical_feature_b],
linear_optimizer=tf.train.FtrlOptimizer(...),
# deep settings
dnn_feature_columns=[
categorical_feature_a_emb, categorical_feature_b_emb,
numeric_feature],
dnn_hidden_units=[1000, 500, 100],
dnn_optimizer=tf.train.ProximalAdagradOptimizer(...),
# warm-start settings
warm_start_from="/path/to/checkpoint/dir")
# To apply L1 and L2 regularization, you can set dnn_optimizer to:
tf.train.ProximalAdagradOptimizer(
learning_rate=0.1,
l1_regularization_strength=0.001,
l2_regularization_strength=0.001)
# To apply learning rate decay, you can set dnn_optimizer to a callable:
lambda: tf.AdamOptimizer(
learning_rate=tf.exponential_decay(
learning_rate=0.1,
global_step=tf.get_global_step(),
decay_steps=10000,
decay_rate=0.96)
# It is the same for linear_optimizer.
# Input builders
def input_fn_train: # returns x, y
pass
estimator.train(input_fn=input_fn_train, steps=100)
def input_fn_eval: # returns x, y
pass
metrics = estimator.evaluate(input_fn=input_fn_eval, steps=10)
def input_fn_predict: # returns x, None
pass
predictions = estimator.predict(input_fn=input_fn_predict)
```
Input of `train` and `evaluate` should have following features,
otherwise there will be a `KeyError`:
* for each `column` in `dnn_feature_columns` + `linear_feature_columns`:
- if `column` is a `_CategoricalColumn`, a feature with `key=column.name`
whose `value` is a `SparseTensor`.
- if `column` is a `_WeightedCategoricalColumn`, two features: the first
with `key` the id column name, the second with `key` the weight column
name. Both features' `value` must be a `SparseTensor`.
- if `column` is a `_DenseColumn`, a feature with `key=column.name`
whose `value` is a `Tensor`.
Loss is calculated by using softmax cross entropy.
@compatibility(eager)
Estimators can be used while eager execution is enabled. Note that `input_fn`
and all hooks are executed inside a graph context, so they have to be written
to be compatible with graph mode. Note that `input_fn` code using `tf.data`
generally works in both graph and eager modes.
@end_compatibility
"""
def __init__(self,
model_dir=None,
linear_feature_columns=None,
linear_optimizer='Ftrl',
dnn_feature_columns=None,
dnn_optimizer='Adagrad',
dnn_hidden_units=None,
dnn_activation_fn=nn.relu,
dnn_dropout=None,
n_classes=2,
weight_column=None,
label_vocabulary=None,
input_layer_partitioner=None,
config=None,
warm_start_from=None,
loss_reduction=losses.Reduction.SUM,
batch_norm=False,
linear_sparse_combiner='sum'):
"""Initializes a DNNLinearCombinedClassifier instance.
Args:
model_dir: Directory to save model parameters, graph and etc. This can
also be used to load checkpoints from the directory into a estimator
to continue training a previously saved model.
linear_feature_columns: An iterable containing all the feature columns
used by linear part of the model. All items in the set must be
instances of classes derived from `FeatureColumn`.
linear_optimizer: An instance of `tf.Optimizer` used to apply gradients to
the linear part of the model. Can also be a string (one of 'Adagrad',
'Adam', 'Ftrl', 'RMSProp', 'SGD'), or callable. Defaults to FTRL
optimizer.
dnn_feature_columns: An iterable containing all the feature columns used
by deep part of the model. All items in the set must be instances of
classes derived from `FeatureColumn`.
dnn_optimizer: An instance of `tf.Optimizer` used to apply gradients to
the deep part of the model. Can also be a string (one of 'Adagrad',
'Adam', 'Ftrl', 'RMSProp', 'SGD'), or callable. Defaults to Adagrad
optimizer.
dnn_hidden_units: List of hidden units per layer. All layers are fully
connected.
dnn_activation_fn: Activation function applied to each layer. If None,
will use `tf.nn.relu`.
dnn_dropout: When not None, the probability we will drop out
a given coordinate.
n_classes: Number of label classes. Defaults to 2, namely binary
classification. Must be > 1.
weight_column: A string or a `_NumericColumn` created by
`tf.feature_column.numeric_column` defining feature column representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example. If it is a string, it is
used as a key to fetch weight tensor from the `features`. If it is a
`_NumericColumn`, raw tensor is fetched by key `weight_column.key`,
then weight_column.normalizer_fn is applied on it to get weight tensor.
label_vocabulary: A list of strings represents possible label values. If
given, labels must be string type and have any value in
`label_vocabulary`. If it is not given, that means labels are
already encoded as integer or float within [0, 1] for `n_classes=2` and
encoded as integer values in {0, 1,..., n_classes-1} for `n_classes`>2 .
Also there will be errors if vocabulary is not provided and labels are
string.
input_layer_partitioner: Partitioner for input layer. Defaults to
`min_max_variable_partitioner` with `min_slice_size` 64 << 20.
config: RunConfig object to configure the runtime settings.
warm_start_from: A string filepath to a checkpoint to warm-start from, or
a `WarmStartSettings` object to fully configure warm-starting. If the
string filepath is provided instead of a `WarmStartSettings`, then all
weights are warm-started, and it is assumed that vocabularies and Tensor
names are unchanged.
loss_reduction: One of `tf.losses.Reduction` except `NONE`. Describes how
to reduce training loss over batch. Defaults to `SUM`.
batch_norm: Whether to use batch normalization after each hidden layer.
linear_sparse_combiner: A string specifying how to reduce the linear model
if a categorical column is multivalent. One of "mean", "sqrtn", and
"sum" -- these are effectively different ways to do example-level
normalization, which can be useful for bag-of-words features. For more
details, see `tf.feature_column.linear_model`.
Raises:
ValueError: If both linear_feature_columns and dnn_features_columns are
empty at the same time.
"""
linear_feature_columns = linear_feature_columns or []
dnn_feature_columns = dnn_feature_columns or []
self._feature_columns = (
list(linear_feature_columns) + list(dnn_feature_columns))
if not self._feature_columns:
raise ValueError('Either linear_feature_columns or dnn_feature_columns '
'must be defined.')
if n_classes == 2:
head = head_lib._binary_logistic_head_with_sigmoid_cross_entropy_loss( # pylint: disable=protected-access
weight_column=weight_column,
label_vocabulary=label_vocabulary,
loss_reduction=loss_reduction)
else:
head = head_lib._multi_class_head_with_softmax_cross_entropy_loss( # pylint: disable=protected-access
n_classes,
weight_column=weight_column,
label_vocabulary=label_vocabulary,
loss_reduction=loss_reduction)
def | (features, labels, mode, config):
"""Call the _dnn_linear_combined_model_fn."""
return _dnn_linear_combined_model_fn(
features=features,
labels=labels,
mode=mode,
head=head,
linear_feature_columns=linear_feature_columns,
linear_optimizer=linear_optimizer,
dnn_feature_columns=dnn_feature_columns,
dnn_optimizer=dnn_optimizer,
dnn_hidden_units=dnn_hidden_units,
dnn_activation_fn=dnn_activation_fn,
dnn_dropout=dnn_dropout,
input_layer_partitioner=input_layer_partitioner,
config=config,
batch_norm=batch_norm,
linear_sparse_combiner=linear_sparse_combiner)
super(DNNLinearCombinedClassifier, self).__init__(
model_fn=_model_fn, model_dir=model_dir, config=config,
warm_start_from=warm_start_from)
@estimator_export('estimator.DNNLinearCombinedRegressor')
class DNNLinearCombinedRegressor(estimator.Estimator):
"""An estimator for TensorFlow Linear and DNN joined models for regression.
Note: This estimator is also known as wide-n-deep.
Example:
```python
numeric_feature = numeric_column(...)
categorical_column_a = categorical_column_with_hash_bucket(...)
categorical_column_b = categorical_column_with_hash_bucket(...)
categorical_feature_a_x_categorical_feature_b = crossed_column(...)
categorical_feature_a_emb = embedding_column(
categorical_column=categorical_feature_a, ...)
categorical_feature_b_emb = embedding_column(
categorical_column=categorical_feature_b, ...)
estimator = DNNLinearCombinedRegressor(
# wide settings
linear_feature_columns=[categorical_feature_a_x_categorical_feature_b],
linear_optimizer=tf.train.FtrlOptimizer(...),
# deep settings
dnn_feature_columns=[
categorical_feature_a_emb, categorical_feature_b_emb,
numeric_feature],
dnn_hidden_units=[1000, 500, 100],
dnn_optimizer=tf.train.ProximalAdagradOptimizer(...),
# warm-start settings
warm_start_from="/path/to/checkpoint/dir")
# To apply L1 and L2 regularization, you can set dnn_optimizer to:
tf.train.ProximalAdagradOptimizer(
learning_rate=0.1,
l1_regularization_strength=0.001,
l2_regularization_strength=0.001)
# To apply learning rate decay, you can set dnn_optimizer to a callable:
lambda: tf.AdamOptimizer(
learning_rate=tf.exponential_decay(
learning_rate=0.1,
global_step=tf.get_global_step(),
decay_steps=10000,
decay_rate=0.96)
# It is the same for linear_optimizer.
# Input builders
def input_fn_train: # returns x, y
pass
estimator.train(input_fn=input_fn_train, steps=100)
def input_fn_eval: # returns x, y
pass
metrics = estimator.evaluate(input_fn=input_fn_eval, steps=10)
def input_fn_predict: # returns x, None
pass
predictions = estimator.predict(input_fn=input_fn_predict)
```
Input of `train` and `evaluate` should have following features,
otherwise there will be a `KeyError`:
* for each `column` in `dnn_feature_columns` + `linear_feature_columns`:
- if `column` is a `_CategoricalColumn`, a feature with `key=column.name`
whose `value` is a `SparseTensor`.
- if `column` is a `_WeightedCategoricalColumn`, two features: the first
with `key` the id column name, the second with `key` the weight column
name. Both features' `value` must be a `SparseTensor`.
- if `column` is a `_DenseColumn`, a feature with `key=column.name`
whose `value` is a `Tensor`.
Loss is calculated by using mean squared error.
@compatibility(eager)
Estimators can be used while eager execution is enabled. Note that `input_fn`
and all hooks are executed inside a graph context, so they have to be written
to be compatible with graph mode. Note that `input_fn` code using `tf.data`
generally works in both graph and eager modes.
@end_compatibility
"""
def __init__(self,
model_dir=None,
linear_feature_columns=None,
linear_optimizer='Ftrl',
dnn_feature_columns=None,
dnn_optimizer='Adagrad',
dnn_hidden_units=None,
dnn_activation_fn=nn.relu,
dnn_dropout=None,
label_dimension=1,
weight_column=None,
input_layer_partitioner=None,
config=None,
warm_start_from=None,
loss_reduction=losses.Reduction.SUM,
batch_norm=False,
linear_sparse_combiner='sum'):
"""Initializes a DNNLinearCombinedRegressor instance.
Args:
model_dir: Directory to save model parameters, graph and etc. This can
also be used to load checkpoints from the directory into a estimator
to continue training a previously saved model.
linear_feature_columns: An iterable containing all the feature columns
used by linear part of the model. All items in the set must be
instances of classes derived from `FeatureColumn`.
linear_optimizer: An instance of `tf.Optimizer` used to apply gradients to
the linear part of the model. Can also be a string (one of 'Adagrad',
'Adam', 'Ftrl', 'RMSProp', 'SGD'), or callable. Defaults to FTRL
optimizer.
dnn_feature_columns: An iterable containing all the feature columns used
by deep part of the model. All items in the set must be instances of
classes derived from `FeatureColumn`.
dnn_optimizer: An instance of `tf.Optimizer` used to apply gradients to
the deep part of the model. Can also be a string (one of 'Adagrad',
'Adam', 'Ftrl', 'RMSProp', 'SGD'), or callable. Defaults to Adagrad
optimizer.
dnn_hidden_units: List of hidden units per layer. All layers are fully
connected.
dnn_activation_fn: Activation function applied to each layer. If None,
will use `tf.nn.relu`.
dnn_dropout: When not None, the probability we will drop out
a given coordinate.
label_dimension: Number of regression targets per example. This is the
size of the last dimension of the labels and logits `Tensor` objects
(typically, these have shape `[batch_size, label_dimension]`).
weight_column: A string or a `_NumericColumn` created by
`tf.feature_column.numeric_column` defining feature column representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example. If it is a string, it is
used as a key to fetch weight tensor from the `features`. If it is a
`_NumericColumn`, raw tensor is fetched by key `weight_column.key`,
then weight_column.normalizer_fn is applied on it to get weight tensor.
input_layer_partitioner: Partitioner for input layer. Defaults to
`min_max_variable_partitioner` with `min_slice_size` 64 << 20.
config: RunConfig object to configure the runtime settings.
warm_start_from: A string filepath to a checkpoint to warm-start from, or
a `WarmStartSettings` object to fully configure warm-starting. If the
string filepath is provided instead of a `WarmStartSettings`, then all
weights are warm-started, and it is assumed that vocabularies and Tensor
names are unchanged.
loss_reduction: One of `tf.losses.Reduction` except `NONE`. Describes how
to reduce training loss over batch. Defaults to `SUM`.
batch_norm: Whether to use batch normalization after each hidden layer.
linear_sparse_combiner: A string specifying how to reduce the linear model
if a categorical column is multivalent. One of "mean", "sqrtn", and
"sum" -- these are effectively different ways to do example-level
normalization, which can be useful for bag-of-words features. For more
details, see `tf.feature_column.linear_model`.
Raises:
ValueError: If both linear_feature_columns and dnn_features_columns are
empty at the same time.
"""
linear_feature_columns = linear_feature_columns or []
dnn_feature_columns = dnn_feature_columns or []
self._feature_columns = (
list(linear_feature_columns) + list(dnn_feature_columns))
if not self._feature_columns:
raise ValueError('Either linear_feature_columns or dnn_feature_columns '
'must be defined.')
def _model_fn(features, labels, mode, config):
"""Call the _dnn_linear_combined_model_fn."""
return _dnn_linear_combined_model_fn(
features=features,
labels=labels,
mode=mode,
head=head_lib._regression_head( # pylint: disable=protected-access
label_dimension=label_dimension, weight_column=weight_column,
loss_reduction=loss_reduction),
linear_feature_columns=linear_feature_columns,
linear_optimizer=linear_optimizer,
dnn_feature_columns=dnn_feature_columns,
dnn_optimizer=dnn_optimizer,
dnn_hidden_units=dnn_hidden_units,
dnn_activation_fn=dnn_activation_fn,
dnn_dropout=dnn_dropout,
input_layer_partitioner=input_layer_partitioner,
config=config,
batch_norm=batch_norm,
linear_sparse_combiner=linear_sparse_combiner)
super(DNNLinearCombinedRegressor, self).__init__(
model_fn=_model_fn, model_dir=model_dir, config=config,
warm_start_from=warm_start_from)
| _model_fn |
ClipboardLink20RegularIcon.tsx | import * as React from 'react';
const ClipboardLink20RegularIcon = () => { | <svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M7.08535 3C7.29127 2.4174 7.84689 2 8.5 2H11.5C12.1531 2 12.7087 2.4174 12.9146 3H14.5C15.3284 3 16 3.67157 16 4.5V11H15V4.5C15 4.22386 14.7761 4 14.5 4H12.9146C12.7087 4.5826 12.1531 5 11.5 5H8.5C7.84689 5 7.29127 4.5826 7.08535 4H5.5C5.22386 4 5 4.22386 5 4.5V16.5C5 16.7761 5.22386 17 5.5 17H8.25606C8.3819 17.356 8.55127 17.6915 8.75777 18H5.5C4.67157 18 4 17.3284 4 16.5V4.5C4 3.67157 4.67157 3 5.5 3H7.08535ZM8.5 3C8.22386 3 8 3.22386 8 3.5C8 3.77614 8.22386 4 8.5 4H11.5C11.7761 4 12 3.77614 12 3.5C12 3.22386 11.7761 3 11.5 3H8.5Z" fill="#212121"/>
<path d="M19 15.5C19 13.567 17.433 12 15.5 12L15.4101 12.0081C15.1769 12.0504 15 12.2545 15 12.5C15 12.7761 15.2239 13 15.5 13L15.6644 13.0053C16.9685 13.09 18 14.1745 18 15.5C18 16.8807 16.8807 18 15.5 18L15.4977 18.0046L15.4079 18.0131C15.1748 18.0565 14.9989 18.2614 15 18.5069C15.0013 18.783 15.2262 19.0059 15.5023 19.0046V19L15.6941 18.9948C17.5357 18.8951 19 17.3686 19 15.5Z" fill="#212121"/>
<path d="M13 12.5C13 12.2239 12.7761 12 12.5 12L12.308 12.0052C10.4643 12.1049 9 13.6314 9 15.5C9 17.433 10.567 19 12.5 19L12.5899 18.9919C12.8231 18.9496 13 18.7455 13 18.5C13 18.2239 12.7761 18 12.5 18L12.3356 17.9947C11.0315 17.91 10 16.8255 10 15.5C10 14.1193 11.1193 13 12.5 13L12.5899 12.9919C12.8231 12.9496 13 12.7455 13 12.5Z" fill="#212121"/>
<path d="M15.5002 15.0023L12.5002 15L12.4104 15.008C12.1771 15.0502 12.0001 15.2543 12 15.4998C11.9999 15.7759 12.2236 15.9999 12.4998 16L15.4998 16.0023L15.5897 15.9943C15.8229 15.9521 15.9999 15.748 16 15.5025C16.0001 15.2264 15.7764 15.0024 15.5002 15.0023Z" fill="#212121"/>
</svg>
)};
export default ClipboardLink20RegularIcon; | return( |
image_resource.rs | use crate::graphics::rendergraph::*;
#[derive(Debug)]
pub struct ImageResource {
pub id: u32,
pub name: String, | impl ImageResource {
pub fn new(id: u32, name: String, description: ImageDescription) -> Self {
Self {
id,
name,
description,
}
}
} | pub description: ImageDescription,
}
|
dataloader_infer.py | """
Copyright (c) 2021 TU Darmstadt
Author: Nikita Araslanov <[email protected]>
License: Apache License 2.0
"""
import os
import torch
from PIL import Image
import numpy as np
import torchvision.transforms as tf
from .dataloader_base import DLBase
class DataSeg(DLBase):
def __init__(self, cfg, split, ignore_labels=[], \
root=os.path.expanduser('./data'), renorm=False):
super(DataSeg, self).__init__()
self.cfg = cfg
self.root = root
self.split = split
self.ignore_labels = ignore_labels
self._init_palette(self.cfg.DATASET.NUM_CLASSES)
# train/val/test splits are pre-cut
split_fn = os.path.join(self.root, self.split + ".txt")
assert os.path.isfile(split_fn)
self.sequence_ids = []
self.sequence_names = []
def add_sequence(name):
vlen = len(self.images)
assert vlen >= cfg.DATASET.VIDEO_LEN, \
"Detected video shorter [{}] than training length [{}]".format(vlen, \
cfg.DATASET.VIDEO_LEN)
self.sequence_ids.append(vlen)
self.sequence_names.append(name) |
self.images = []
self.masks = []
self.flags = []
token = None
with open(split_fn, "r") as lines:
for line in lines:
_flag, _image, _mask = line.strip("\n").split(' ')
# save every frame
#_flag = 1
self.flags.append(int(_flag))
_image = os.path.join(cfg.DATASET.ROOT, _image.lstrip('/'))
assert os.path.isfile(_image), '%s not found' % _image
# each sequence may have a different length
# do some book-keeping e.g. to ensure we have
# sequences long enough for subsequent sampling
_token = _image.split("/")[-2] # parent directory
# sequence ID is in the filename
#_token = os.path.basename(_image).split("_")[0]
if token != _token:
if not token is None:
add_sequence(token)
token = _token
self.images.append(_image)
if _mask is None:
self.masks.append(None)
else:
_mask = os.path.join(cfg.DATASET.ROOT, _mask.lstrip('/'))
#assert os.path.isfile(_mask), '%s not found' % _mask
self.masks.append(_mask)
# update the last sequence
# returns the total amount of frames
add_sequence(token)
print("Loaded {} sequences".format(len(self.sequence_ids)))
# definint data augmentation:
print("Dataloader: {}".format(split), " #", len(self.images))
print("\t {}: no augmentation".format(split))
self.tf = tf.Compose([tf.ToTensor(), tf.Normalize(mean=self.MEAN, std=self.STD)])
self._num_samples = len(self.images)
def __len__(self):
return len(self.sequence_ids)
def _mask2tensor(self, mask, num_classes=6):
h,w = mask.shape
ones = torch.ones(1,h,w)
zeros = torch.zeros(num_classes,h,w)
max_idx = mask.max()
assert max_idx < num_classes, "{} >= {}".format(max_idx, num_classes)
return zeros.scatter(0, mask[None, ...], ones)
def denorm(self, image):
if image.dim() == 3:
assert image.dim() == 3, "Expected image [CxHxW]"
assert image.size(0) == 3, "Expected RGB image [3xHxW]"
for t, m, s in zip(image, self.MEAN, self.STD):
t.mul_(s).add_(m)
elif image.dim() == 4:
# batch mode
assert image.size(1) == 3, "Expected RGB image [3xHxW]"
for t, m, s in zip((0,1,2), self.MEAN, self.STD):
image[:, t, :, :].mul_(s).add_(m)
return image
def __getitem__(self, index):
seq_to = self.sequence_ids[index]
seq_from = 0 if index == 0 else self.sequence_ids[index - 1]
image0 = Image.open(self.images[seq_from])
w,h = image0.size
images, masks, fns, flags = [], [], [], []
tracks = torch.LongTensor(self.cfg.DATASET.NUM_CLASSES).fill_(-1)
masks = torch.LongTensor(self.cfg.DATASET.NUM_CLASSES, h, w).zero_()
known_ids = set()
for t in range(seq_from, seq_to):
t0 = t - seq_from
image = Image.open(self.images[t]).convert('RGB')
fns.append(os.path.basename(self.images[t].replace(".jpg", "")))
flags.append(self.flags[t])
if os.path.isfile(self.masks[t]):
mask = Image.open(self.masks[t])
mask = torch.from_numpy(np.array(mask, np.long, copy=False))
unique_ids = np.unique(mask)
for oid in unique_ids:
if not oid in known_ids:
tracks[oid] = t0
known_ids.add(oid)
masks[oid] = (mask == oid).long()
else:
mask = Image.new('L', image.size)
image = self.tf(image)
images.append(image)
images = torch.stack(images, 0)
seq_name = self.sequence_names[index]
flags = torch.LongTensor(flags)
return images, images, masks, tracks, len(known_ids), fns, flags, seq_name | return vlen |
main_test.go | package main
import (
"strings"
"testing"
"github.com/kylelemons/godebug/diff"
)
func TestUpgrade(t *testing.T) | {
cases := []struct {
name string
input string
expected string
expectedErr error
}{
{
name: "no terragrunt attribute",
input: `
include {
path = "${find_in_parent_folders()}"
}
`,
expected: "",
expectedErr: errNotTerragruntConfig,
},
{
name: "simple config",
input: `
terragrunt = {
include {
path = "${find_in_parent_folders()}"
}
terraform {
source = "git::ssh://[email protected]/org/module.git//module?ref=master"
}
}
`,
expected: `
include {
path = find_in_parent_folders()
}
terraform {
source = "git::ssh://[email protected]/org/module.git//module?ref=master"
}
`,
expectedErr: nil,
},
{
name: "simple with inputs",
input: `
terragrunt = {
include {
path = "${find_in_parent_folders()}"
}
terraform {
source = "git::ssh://[email protected]/org/module.git//module?ref=master"
}
}
domain = "app.foo.com"
instance_type = "m5.xlarge"
instance_count = 10
autoscale = true
autoscale_config = {
min = 5
max = 15
}
allowed_ports = [80, 443]
`,
expected: `
include {
path = find_in_parent_folders()
}
terraform {
source = "git::ssh://[email protected]/org/module.git//module?ref=master"
}
inputs = {
domain = "app.foo.com"
instance_type = "m5.xlarge"
instance_count = 10
autoscale = true
autoscale_config = {
min = 5
max = 15
}
allowed_ports = [80, 443]
}
`,
expectedErr: nil,
},
{
name: "complex config",
input: `/*
* ad-hoc comment
*/
// this will be lost
terragrunt = {
// this should be preserved
include {
path = "${find_in_parent_folders()}"
}
# comment
# with multiple
# lines
// and multiple
// styles...?!
terraform {
source = "git::ssh://[email protected]/org/module.git//module?ref=v123" // private repo
extra_arguments "foo" {
commands = ["plan"]
arguments = ["-var", "foo=bar"]
}
}
// more advanced settings
dependencies {
paths = ["./foo"]
}
iam_role = "terragrunt-iam-role"
prevent_destroy = true
skip = false
/*
* remote state settings
*/
remote_state = {
backend = "s3"
config {
key = "${path_relative_to_include()}/terraform.tfstate"
encrypt = true
bucket = "my-tfstate"
dynamodb_table = "terraform-state-locks"
region = "us-east-1"
s3_bucket_tags {
name = "Terraform state storage"
}
dynamodb_table_tags {
name = "Terraform lock table"
}
}
}
}
# some more comments
# this time it's
# a multi-line comment
domain = "app.foo.com"
instance_type = "m5.xlarge"
instance_count = 10
autoscale = true
// detached between literals
some_other_var = "foo"
another_one = 12
list_var = ["abc", "def", "ghi"]
// here's an ad hoc comment
complex = {
some_list = ["abc", "def"]
some_bool = true
some_int = 5
some_str = "random"
some_nested_obj = {
abc = "baz"
}
}
some_obj_list = [
{
foo = "bar"
},
{
baz = "quux"
},
{
quux = <<-EOF
This is an indented heredoc
EOF
},
]
some_heredoc = <<EOF
#!/bin/bash
echo "here's a shell script"
EOF
`,
expected: `
/*
* ad-hoc comment
*/
// this should be preserved
include {
path = find_in_parent_folders()
}
# comment
# with multiple
# lines
// and multiple
// styles...?!
terraform {
source = "git::ssh://[email protected]/org/module.git//module?ref=v123" // private repo
extra_arguments "foo" {
commands = ["plan"]
arguments = ["-var", "foo=bar"]
}
}
// more advanced settings
dependencies {
paths = ["./foo"]
}
iam_role = "terragrunt-iam-role"
prevent_destroy = true
skip = false
/*
* remote state settings
*/
remote_state {
backend = "s3"
config = {
key = "${path_relative_to_include()}/terraform.tfstate"
encrypt = true
bucket = "my-tfstate"
dynamodb_table = "terraform-state-locks"
region = "us-east-1"
s3_bucket_tags = {
name = "Terraform state storage"
}
dynamodb_table_tags = {
name = "Terraform lock table"
}
}
}
inputs = {
# some more comments
# this time it's
# a multi-line comment
domain = "app.foo.com"
instance_type = "m5.xlarge"
instance_count = 10
autoscale = true
// detached between literals
some_other_var = "foo"
another_one = 12
list_var = ["abc", "def", "ghi"]
// here's an ad hoc comment
complex = {
some_list = ["abc", "def"]
some_bool = true
some_int = 5
some_str = "random"
some_nested_obj = {
abc = "baz"
}
}
some_obj_list = [
{
foo = "bar"
},
{
baz = "quux"
},
{
quux = <<EOF
This is an indented heredoc
EOF
},
]
some_heredoc = <<EOF
#!/bin/bash
echo "here's a shell script"
EOF
}
`,
expectedErr: nil,
},
{
name: "rename functions",
input: `
terragrunt = {
include {
path = "${find_in_parent_folders()}"
}
terraform {
extra_arguments "args" {
commands = ["plan", "apply"]
required_var_files = [
"${get_parent_tfvars_dir()}/terraform.tfvars",
"${get_tfvars_dir()}/../common.tfvars",
]
}
}
}
`,
expected: `
include {
path = find_in_parent_folders()
}
terraform {
extra_arguments "args" {
commands = ["plan", "apply"]
required_var_files = [
"${get_parent_terragrunt_dir()}/terraform.tfvars",
"${get_terragrunt_dir()}/../common.tfvars",
]
}
}
`,
expectedErr: nil,
},
}
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
cmd := command{}
actual, err := cmd.upgrade([]byte(c.input))
if err != nil && c.expectedErr == nil {
t.Fatalf("unexpected error: %v", err)
} else if c.expectedErr != nil && err != c.expectedErr {
t.Fatalf("incorrect error: got=%v want=%v", err, c.expectedErr)
}
// ditch the leading newline - used above to make the formatting a bit nicer
expected := strings.TrimLeft(c.expected, "\n")
if string(actual) != expected {
t.Errorf("incorrect result (-want, +got):\n%s\n", diff.Diff(string(actual), expected))
}
})
}
} |
|
test.rs | use crate::command_prelude::*;
use cargo::ops::{self, CompileFilter, FilterRule, LibRule};
use cargo::util::errors;
use failure::Fail;
pub fn cli() -> App {
subcommand("test")
// Subcommand aliases are handled in `aliased_command()`.
// .alias("t")
.setting(AppSettings::TrailingVarArg)
.about("Execute all unit and integration tests and build examples of a local package")
.arg(
Arg::with_name("TESTNAME")
.help("If specified, only run tests containing this string in their names"),
)
.arg(
Arg::with_name("args")
.help("Arguments for the test binary")
.multiple(true)
.last(true),
)
.arg(
opt(
"quiet",
"Display one character per test instead of one line",
)
.short("q"),
)
.arg_targets_all(
"Test only this package's library unit tests",
"Test only the specified binary",
"Test all binaries",
"Test only the specified example", | "Test all tests",
"Test only the specified bench target",
"Test all benches",
"Test all targets",
)
.arg(opt("doc", "Test only this library's documentation"))
.arg(opt("no-run", "Compile, but don't run tests"))
.arg(opt("no-fail-fast", "Run all tests regardless of failure"))
.arg_package_spec(
"Package to run tests for",
"Test all packages in the workspace",
"Exclude packages from the test",
)
.arg_jobs()
.arg_release("Build artifacts in release mode, with optimizations")
.arg_profile("Build artifacts with the specified profile")
.arg_features()
.arg_target_triple("Build for the target triple")
.arg_target_dir()
.arg_manifest_path()
.arg_message_format()
.after_help(
"\
The test filtering argument TESTNAME and all the arguments following the
two dashes (`--`) are passed to the test binaries and thus to libtest
(rustc's built in unit-test and micro-benchmarking framework). If you're
passing arguments to both Cargo and the binary, the ones after `--` go to the
binary, the ones before go to Cargo. For details about libtest's arguments see
the output of `cargo test -- --help`. As an example, this will run all
tests with `foo` in their name on 3 threads in parallel:
cargo test foo -- --test-threads 3
If the `--package` argument is given, then SPEC is a package ID specification
which indicates which package should be tested. If it is not given, then the
current package is tested. For more information on SPEC and its format, see the
`cargo help pkgid` command.
All packages in the workspace are tested if the `--workspace` flag is supplied. The
`--workspace` flag is automatically assumed for a virtual manifest.
Note that `--exclude` has to be specified in conjunction with the `--workspace` flag.
The `--jobs` argument affects the building of the test executable but does
not affect how many jobs are used when running the tests. The default value
for the `--jobs` argument is the number of CPUs. If you want to control the
number of simultaneous running test cases, pass the `--test-threads` option
to the test binaries:
cargo test -- --test-threads=1
Compilation can be configured via the `test` profile in the manifest.
By default the rust test harness hides output from test execution to
keep results readable. Test output can be recovered (e.g., for debugging)
by passing `--nocapture` to the test binaries:
cargo test -- --nocapture
To get the list of all options available for the test binaries use this:
cargo test -- --help
",
)
}
pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult {
let ws = args.workspace(config)?;
let mut compile_opts = args.compile_options(
config,
CompileMode::Test,
Some(&ws),
ProfileChecking::Checked,
)?;
compile_opts.build_config.profile_kind = args.get_profile_kind(
config,
ProfileKind::Custom("test".to_owned()),
ProfileChecking::Checked,
)?;
// `TESTNAME` is actually an argument of the test binary, but it's
// important, so we explicitly mention it and reconfigure.
let test_name: Option<&str> = args.value_of("TESTNAME");
let test_args = args.value_of("TESTNAME").into_iter();
let test_args = test_args.chain(args.values_of("args").unwrap_or_default());
let test_args = test_args.collect::<Vec<_>>();
let no_run = args.is_present("no-run");
let doc = args.is_present("doc");
if doc {
if let CompileFilter::Only { .. } = compile_opts.filter {
return Err(CliError::new(
failure::format_err!("Can't mix --doc with other target selecting options"),
101,
));
}
if no_run {
return Err(CliError::new(
failure::format_err!("Can't skip running doc tests with --no-run"),
101,
));
}
compile_opts.build_config.mode = CompileMode::Doctest;
compile_opts.filter = ops::CompileFilter::new(
LibRule::True,
FilterRule::none(),
FilterRule::none(),
FilterRule::none(),
FilterRule::none(),
);
} else if test_name.is_some() {
if let CompileFilter::Default { .. } = compile_opts.filter {
compile_opts.filter = ops::CompileFilter::new(
LibRule::Default, // compile the library, so the unit tests can be run filtered
FilterRule::All, // compile the binaries, so the unit tests in binaries can be run filtered
FilterRule::All, // compile the tests, so the integration tests can be run filtered
FilterRule::none(), // specify --examples to unit test binaries filtered
FilterRule::none(), // specify --benches to unit test benchmarks filtered
); // also, specify --doc to run doc tests filtered
}
}
let ops = ops::TestOptions {
no_run,
no_fail_fast: args.is_present("no-fail-fast"),
compile_opts,
};
let err = ops::run_tests(&ws, &ops, &test_args)?;
match err {
None => Ok(()),
Some(err) => {
let context = failure::format_err!("{}", err.hint(&ws, &ops.compile_opts));
let e = match err.exit.as_ref().and_then(|e| e.code()) {
// Don't show "process didn't exit successfully" for simple errors.
Some(i) if errors::is_simple_exit_code(i) => CliError::new(context, i),
Some(i) => CliError::new(err.context(context).into(), i),
None => CliError::new(err.context(context).into(), 101),
};
Err(e)
}
}
} | "Test all examples",
"Test only the specified test target", |
endpoint.go | /*
* Copyright (c) 2021 Huy Duc Dao
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gin
import (
"github.com/gin-gonic/gin"
opencensus2 "github.com/starvn/sonic/telemetry/opencensus"
"github.com/starvn/turbo/config"
"github.com/starvn/turbo/proxy"
sgin "github.com/starvn/turbo/route/gin"
"go.opencensus.io/plugin/ochttp"
"go.opencensus.io/plugin/ochttp/propagation/b3"
"go.opencensus.io/stats"
"go.opencensus.io/tag"
"go.opencensus.io/trace"
"go.opencensus.io/trace/propagation"
"net/http"
"time"
)
func | (hf sgin.HandlerFactory) sgin.HandlerFactory {
return func(cfg *config.EndpointConfig, p proxy.Proxy) gin.HandlerFunc {
return HandlerFunc(cfg, hf(cfg, p), nil)
}
}
func HandlerFunc(cfg *config.EndpointConfig, next gin.HandlerFunc, prop propagation.HTTPFormat) gin.HandlerFunc {
if !opencensus2.IsRouterEnabled() {
return next
}
if prop == nil {
prop = &b3.HTTPFormat{}
}
pathExtractor := opencensus2.GetAggregatedPathForMetrics(cfg)
h := &handler{
name: cfg.Endpoint,
propagation: prop,
Handler: next,
StartOptions: trace.StartOptions{
SpanKind: trace.SpanKindServer,
},
tags: []tagGenerator{
func(r *http.Request) tag.Mutator { return tag.Upsert(ochttp.KeyServerRoute, cfg.Endpoint) },
func(r *http.Request) tag.Mutator { return tag.Upsert(ochttp.Host, r.Host) },
func(r *http.Request) tag.Mutator { return tag.Upsert(ochttp.Method, r.Method) },
func(r *http.Request) tag.Mutator { return tag.Upsert(ochttp.Path, pathExtractor(r)) },
},
}
return h.HandlerFunc
}
type handler struct {
name string
propagation propagation.HTTPFormat
Handler gin.HandlerFunc
StartOptions trace.StartOptions
IsPublicEndpoint bool
tags []tagGenerator
}
type tagGenerator func(*http.Request) tag.Mutator
func (h *handler) HandlerFunc(c *gin.Context) {
var traceEnd, statsEnd func()
c.Request, traceEnd = h.startTrace(c.Writer, c.Request)
c.Writer, statsEnd = h.startStats(c.Writer, c.Request)
c.Set(opencensus2.ContextKey, trace.FromContext(c.Request.Context()))
h.Handler(c)
statsEnd()
traceEnd()
}
func (h *handler) startTrace(_ gin.ResponseWriter, r *http.Request) (*http.Request, func()) {
ctx := r.Context()
var span *trace.Span
sc, ok := h.extractSpanContext(r)
if ok && !h.IsPublicEndpoint {
ctx, span = trace.StartSpanWithRemoteParent(
ctx,
h.name,
sc,
trace.WithSampler(h.StartOptions.Sampler),
trace.WithSpanKind(h.StartOptions.SpanKind),
)
} else {
ctx, span = trace.StartSpan(
ctx,
h.name,
trace.WithSampler(h.StartOptions.Sampler),
trace.WithSpanKind(h.StartOptions.SpanKind),
)
if ok {
span.AddLink(trace.Link{
TraceID: sc.TraceID,
SpanID: sc.SpanID,
Type: trace.LinkTypeChild,
Attributes: nil,
})
}
}
span.AddAttributes(opencensus2.RequestAttrs(r)...)
return r.WithContext(ctx), span.End
}
func (h *handler) extractSpanContext(r *http.Request) (trace.SpanContext, bool) {
return h.propagation.SpanContextFromRequest(r)
}
func (h *handler) startStats(w gin.ResponseWriter, r *http.Request) (gin.ResponseWriter, func()) {
tags := make([]tag.Mutator, len(h.tags))
for i, t := range h.tags {
tags[i] = t(r)
}
ctx, _ := tag.New(r.Context(), tags...)
track := &trackingResponseWriter{
start: time.Now(),
ctx: ctx,
ResponseWriter: w,
}
if r.Body == nil {
// TODO: Handle cases where ContentLength is not set.
track.reqSize = -1
} else if r.ContentLength > 0 {
track.reqSize = r.ContentLength
}
stats.Record(ctx, ochttp.ServerRequestCount.M(1))
return track, track.end
}
| New |
tlb.rs | //! Assembly routines for the translation lokaside buffer.
/// Invalidates the translation lookaside buffer entry for the
/// page containing the specified address.
/// [More documentation here](http://x86.renejeschke.de/html/file_module_x86_id_144.html).
///
/// # Safety
///
/// Will produce a General Protection fault if the current
/// priviledge level is not 0.
pub unsafe fn invalidate_entry(address: usize) {
asm!("invlpg ($0)" :: "r" (address) : "memory");
}
pub unsafe fn flush_tlb() | {
let cr3: usize;
// Mov cr3 reg out
asm!("mov %cr3, $0" : "=r" (cr3));
// Write value to cr3 reg again
asm!("mov $0, %cr3" :: "r" (cr3) : "memory");
} |
|
main.go | // Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// [START osconfig_v1beta_generated_OsConfigService_ListPatchJobs_sync]
package main
import (
"context"
osconfig "cloud.google.com/go/osconfig/apiv1beta"
"google.golang.org/api/iterator"
osconfigpb "google.golang.org/genproto/googleapis/cloud/osconfig/v1beta"
)
func main() |
// [END osconfig_v1beta_generated_OsConfigService_ListPatchJobs_sync]
| {
// import osconfigpb "google.golang.org/genproto/googleapis/cloud/osconfig/v1beta"
// import "google.golang.org/api/iterator"
ctx := context.Background()
c, err := osconfig.NewClient(ctx)
if err != nil {
// TODO: Handle error.
}
req := &osconfigpb.ListPatchJobsRequest{
// TODO: Fill request struct fields.
}
it := c.ListPatchJobs(ctx, req)
for {
resp, err := it.Next()
if err == iterator.Done {
break
}
if err != nil {
// TODO: Handle error.
}
// TODO: Use resp.
_ = resp
}
} |
bitcoin_es_VE.ts | <TS language="es_VE" version="2.1">
<context>
<name>AddressBookPage</name>
<message>
<source>Right-click to edit address or label</source>
<translation>Click derecho para editar la dirección o etiqueta</translation>
</message>
<message>
<source>Create a new address</source>
<translation>Crear una nueva dirección</translation>
</message>
<message>
<source>&New</source>
<translation>&Nuevo</translation>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Copie las direcciones seleccionadas actualmente al portapapeles del sistema</translation>
</message>
<message>
<source>&Copy</source>
<translation>&Copiar</translation>
</message>
<message>
<source>C&lose</source>
<translation>C&errar</translation>
</message>
<message>
<source>Delete the currently selected address from the list</source>
<translation>Borrar las direcciones seleccionadas recientemente de la lista</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Exportar los datos en la pestaña actual a un archivo</translation>
</message>
<message>
<source>&Export</source>
<translation>&Exportar</translation>
</message>
<message>
<source>&Delete</source>
<translation>&Borrar</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<source>Passphrase Dialog</source>
<translation>Diálogo contraseña</translation>
</message>
<message>
<source>Enter passphrase</source>
<translation>Ingresa frase de contraseña</translation>
</message>
<message>
<source>New passphrase</source>
<translation>Nueva frase de contraseña</translation>
</message>
<message>
<source>Repeat new passphrase</source>
<translation>Repetir nueva frase de contraseña</translation>
</message>
</context>
<context>
<name>BanTableModel</name>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<source>Sign &message...</source>
<translation>Firmar &mensaje...</translation>
</message>
<message>
<source>Synchronizing with network...</source>
<translation>Sincronizando con la red...</translation>
</message>
<message>
<source>Node</source>
<translation>Nodo</translation>
</message>
<message>
<source>Show general overview of wallet</source>
<translation>Mostrar visión general de la billetera</translation>
</message>
<message>
<source>&Transactions</source>
<translation>&Transacciones</translation>
</message>
<message>
<source>Browse transaction history</source>
<translation>Buscar historial de transacciones</translation>
</message>
<message>
<source>E&xit</source>
<translation>S&alir</translation>
</message>
<message>
<source>Quit application</source>
<translation>Quitar aplicación</translation>
</message>
<message>
<source>&Options...</source>
<translation>&Opciones...</translation>
</message>
<message>
<source>&Receiving addresses...</source>
<translation>Recepción de direcciones</translation>
</message>
<message>
<source>Reindexing blocks on disk...</source>
<translation>Reindexando bloques en el disco...</translation>
</message>
<message>
<source>Send coins to a Bitcoin address</source>
<translation>Enviar monedas a una dirección Array</translation>
</message>
<message>
<source>Backup wallet to another location</source>
<translation>Respaldar billetera en otra ubicación</translation>
</message>
<message>
<source>Change the passphrase used for wallet encryption</source>
<translation>Cambiar frase secreta usada para la encriptación de la billetera</translation>
</message>
<message>
<source>Open debugging and diagnostic console</source>
<translation>Abre la consola de depuración y diágnostico</translation>
</message>
<message>
<source>Bitcoin</source>
<translation>Array</translation>
</message>
<message>
<source>Wallet</source>
<translation>Billetera</translation>
</message>
<message>
<source>&Send</source>
<translation>&Enviar</translation>
</message>
<message>
<source>&Receive</source>
<translation>&Recibir</translation>
</message>
<message>
<source>&Show / Hide</source>
<translation>&Mostar / Ocultar</translation>
</message>
<message>
<source>Show or hide the main Window</source>
<translation>Mostar u ocultar la ventana principal</translation>
</message>
<message>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Encriptar las llaves privadas que pertenecen a tu billetera</translation>
</message>
<message>
<source>Sign messages with your Bitcoin addresses to prove you own them</source>
<translation>Firma mensajes con tus direcciones Array para probar que eres dueño de ellas</translation>
</message>
<message>
<source>Verify messages to ensure they were signed with specified Bitcoin addresses</source>
<translation>Verificar mensajes para asegurar que estaban firmados con direcciones Array especificas</translation>
</message>
<message>
<source>&File</source>
<translation>&Archivo</translation>
</message>
<message>
<source>&Settings</source>
<translation>&Configuración</translation>
</message>
<message>
<source>&Command-line options</source>
<translation>Opciones de línea de comandos</translation>
</message>
<message>
<source>%1 behind</source>
<translation>%1 detrás</translation>
</message>
<message>
<source>Last received block was generated %1 ago.</source>
<translation>El último bloque recibido fue generado hace %1 hora(s).</translation>
</message>
<message>
<source>Transactions after this will not yet be visible.</source>
<translation>Transacciones después de esta no serán visibles todavía.</translation>
</message>
<message>
<source>Error</source>
<translation>Error</translation>
</message>
<message>
<source>Warning</source>
<translation>Advertencia</translation>
</message>
<message>
<source>Information</source>
<translation>Información</translation>
</message>
<message>
<source>Up to date</source>
<translation>Al día</translation>
</message>
<message>
<source>Catching up...</source>
<translation>Alcanzando...</translation>
</message>
<message>
<source>Sent transaction</source>
<translation>Transacción enviada</translation>
</message>
<message>
<source>Incoming transaction</source>
<translation>Transacción entrante</translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>La billetera está encriptada y desbloqueada recientemente</translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>La billetera está encriptada y bloqueada recientemente</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<source>Coin Selection</source>
<translation>Selección de moneda</translation>
</message>
<message>
<source>Quantity:</source>
<translation>Cantidad:</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Bytes:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Monto:</translation>
</message>
<message>
<source>Fee:</source>
<translation>Comisión:</translation>
</message>
<message>
<source>Dust:</source>
<translation>Polvo:</translation>
</message>
<message>
<source>Change:</source>
<translation>Cambio:</translation>
</message>
<message>
<source>(un)select all</source>
<translation>(de)seleccionar todo</translation>
</message>
<message>
<source>Tree mode</source>
<translation>Modo de árbol</translation>
</message>
<message>
<source>List mode</source>
<translation>Modo de lista</translation>
</message>
<message>
<source>Amount</source>
<translation>Monto</translation>
</message>
<message>
<source>Received with label</source>
<translation>Recibido con etiqueta</translation>
</message>
<message>
<source>Received with address</source>
<translation>Recibido con dirección</translation>
</message>
<message>
<source>Date</source>
<translation>Fecha</translation>
</message>
<message>
<source>Confirmations</source>
<translation>Confirmaciones</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Confirmado</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<source>Edit Address</source>
<translation>Editar dirección</translation>
</message>
<message>
<source>&Label</source>
<translation>&Etiqueta</translation>
</message>
<message>
<source>The label associated with this address list entry</source>
<translation>La etiqueta asociada con esta entrada de la lista de direcciones</translation>
</message>
<message>
<source>The address associated with this address list entry. This can only be modified for sending addresses.</source>
<translation>La dirección asociada con esta entrada de la lista de direcciones. Esta puede ser modificada solo para el envío de direcciones.</translation>
</message>
<message>
<source>&Address</source>
<translation>&Dirección</translation>
</message>
</context>
<context>
<name>FreespaceChecker</name>
<message>
<source>A new data directory will be created.</source>
<translation>Un nuevo directorio de datos será creado.</translation>
</message>
<message>
<source>name</source>
<translation>nombre</translation>
</message>
<message>
<source>Directory already exists. Add %1 if you intend to create a new directory here.</source>
<translation>El directorio ya existe. Agrega %1 si tiene la intención de crear un nuevo directorio aquí.</translation>
</message>
<message>
<source>Path already exists, and is not a directory.</source>
<translation>La ruta ya existe, y no es un directorio.</translation>
</message>
<message>
<source>Cannot create data directory here.</source>
<translation>No puede crear directorio de datos aquí.</translation>
</message>
</context>
<context>
<name>HelpMessageDialog</name>
<message>
<source>version</source>
<translation>versión</translation>
</message>
<message>
<source>(%1-bit)</source>
<translation>(%1-bit)</translation>
</message>
<message>
<source>Command-line options</source>
<translation>Opciones de línea de comandos</translation>
</message>
<message>
<source>Usage:</source>
<translation>Uso:</translation>
</message>
<message>
<source>command-line options</source>
<translation>opciones de línea de comandos</translation>
</message>
</context>
<context>
<name>Intro</name>
<message>
<source>Use the default data directory</source>
<translation>Usar el directorio de datos por defecto</translation>
</message>
<message>
<source>Use a custom data directory:</source>
<translation>Usa un directorio de datos personalizado:</translation>
</message>
<message>
<source>Error: Specified data directory "%1" cannot be created.</source>
<translation>Error: Directorio de datos especificado "%1" no puede ser creado.</translation>
</message>
<message>
<source>Error</source>
<translation>Error</translation>
</message>
</context>
<context>
<name>ModalOverlay</name>
</context>
<context>
<name>OpenURIDialog</name>
<message>
<source>Open URI</source>
<translation>Abrir URI</translation>
</message>
<message>
<source>Open payment request from URI or file</source>
<translation>Abrir solicitud de pago desde URI o archivo</translation>
</message>
<message>
<source>URI:</source>
<translation>URI:</translation>
</message>
<message>
<source>Select payment request file</source>
<translation>Seleccionar archivo de solicitud de pago</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<source>Options</source>
<translation>Opciones</translation>
</message>
<message>
<source>&Main</source>
<translation>&Main</translation>
</message>
<message>
<source>&Network</source>
<translation>&Red</translation>
</message>
<message>
<source>W&allet</source>
<translation>Billetera</translation>
</message>
<message>
<source>Expert</source>
<translation>Experto</translation>
</message>
<message>
<source>none</source>
<translation>ninguno</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<source>Available:</source>
<translation>Disponible:</translation>
</message>
<message>
<source>Pending:</source>
<translation>Pendiente:</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
</context>
<context>
<name>PeerTableModel</name>
</context>
<context>
<name>QObject</name>
<message>
<source>Amount</source>
<translation>Monto</translation>
</message>
<message>
<source>%1 and %2</source>
<translation>%1 y %2</translation>
</message>
</context>
<context>
<name>QObject::QObject</name>
</context>
<context>
<name>QRImageWidget</name>
</context>
<context>
<name>RPCConsole</name>
<message>
<source>&Information</source>
<translation>Información</translation>
</message>
<message>
<source>In:</source>
<translation>Entrada:</translation>
</message>
<message>
<source>Out:</source>
<translation>Salida:</translation>
</message>
</context>
<context>
<name>ReceiveCoinsDialog</name>
<message>
<source>&Amount:</source>
<translation>Monto:</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Etiqueta:</translation>
</message>
<message>
<source>Show</source>
<translation>Mostrar</translation>
</message>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<source>Copy &Address</source>
<translation>&Copiar Dirección</translation>
</message>
</context>
<context>
<name>RecentRequestsTableModel</name>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<source>Quantity:</source>
<translation>Cantidad:</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Bytes:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Monto:</translation>
</message>
<message>
<source>Fee:</source>
<translation>Comisión:</translation>
</message>
<message>
<source>Change:</source>
<translation>Cambio:</translation>
</message>
<message>
<source>Dust:</source>
<translation>Polvo:</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<source>A&mount:</source>
<translation>Monto:</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Etiqueta:</translation>
</message>
</context>
<context>
<name>SendConfirmationDialog</name>
</context>
<context>
<name>ShutdownWindow</name>
</context>
<context>
<name>SignVerifyMessageDialog</name>
</context>
<context>
<name>SplashScreen</name>
</context>
<context>
<name>TrafficGraphWidget</name>
</context>
<context>
<name>TransactionDesc</name>
</context>
<context>
<name>TransactionDescDialog</name>
</context> | <context>
<name>TransactionView</name>
</context>
<context>
<name>UnitDisplayStatusBarControl</name>
</context>
<context>
<name>WalletFrame</name>
</context>
<context>
<name>WalletModel</name>
</context>
<context>
<name>WalletView</name>
</context>
<context>
<name>bitcoin-core</name>
<message>
<source>Options:</source>
<translation>Opciones:</translation>
</message>
<message>
<source>Specify data directory</source>
<translation>Especifique directorio de datos</translation>
</message>
<message>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Conecte un nodo para recuperar direcciones pares, y desconecte</translation>
</message>
<message>
<source>Specify your own public address</source>
<translation>Especifique su propia dirección pública</translation>
</message>
<message>
<source>Accept command line and JSON-RPC commands</source>
<translation>Aceptar linea de comando y comandos JSON-RPC</translation>
</message>
<message>
<source>Run in the background as a daemon and accept commands</source>
<translation>Correr en segundo plano como daemon y aceptar comandos</translation>
</message>
<message>
<source>Bitcoin Core</source>
<translation>Array</translation>
</message>
<message>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>Enlazar dirección dada y siempre escuchar en ella. Usar [host]:port notación para IPv6</translation>
</message>
<message>
<source>Delete all wallet transactions and only recover those parts of the blockchain through -rescan on startup</source>
<translation>Borrar todas las transacciones de la billetera y solo recuperar aquellas partes de la cadena de bloques a través de -rescan en el inicio del sistema.</translation>
</message>
<message>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>Ejecutar comando cuando una transacción de la billetera cambia (%s en cmd es reemplazado por TxID)</translation>
</message>
<message>
<source>Set the number of script verification threads (%u to %d, 0 = auto, <0 = leave that many cores free, default: %d)</source>
<translation>Fija el número de verificación de hilos de script (%u a %d, 0 = auto, <0 = leave that many cores free, default: %d)</translation>
</message>
<message>
<source>Information</source>
<translation>Información</translation>
</message>
<message>
<source>Warning</source>
<translation>Advertencia</translation>
</message>
<message>
<source>Error</source>
<translation>Error</translation>
</message>
</context>
</TS> | <context>
<name>TransactionTableModel</name>
</context> |
utils.py | """Utility functions and classes for the STIX2 library."""
import datetime as dt
import enum
import json
import re
import pytz
import six
import stix2
# Sentinel value for properties that should be set to the current time.
# We can't use the standard 'default' approach, since if there are multiple
# timestamps in a single object, the timestamps will vary by a few microseconds.
NOW = object()
PREFIX_21_REGEX = re.compile(r'^[a-z].*')
_TIMESTAMP_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
_TIMESTAMP_FORMAT_FRAC = "%Y-%m-%dT%H:%M:%S.%fZ"
class Precision(enum.Enum):
"""
Timestamp format precisions.
"""
# auto() wasn't introduced until Python 3.6.
ANY = 1
SECOND = 2
MILLISECOND = 3
class PrecisionConstraint(enum.Enum):
"""
Timestamp precision constraints. These affect how the Precision
values are applied when formatting a timestamp.
These constraints don't really make sense with the ANY precision, so they
have no effect in that case.
"""
EXACT = 1 # format must have exactly the given precision
MIN = 2 # format must have at least the given precision
# no need for a MAX constraint yet
def _to_enum(value, enum_type, enum_default=None):
"""
Detect and convert strings to enums and None to a default enum. This
allows use of strings and None in APIs, while enforcing the enum type: if
you use a string, it must name a valid enum value. This implementation is
case-insensitive.
:param value: A value to be interpreted as an enum (string, Enum instance,
or None). If an Enum instance, it must be an instance of enum_type.
:param enum_type: The enum type which strings will be interpreted against
:param enum_default: The default enum to use if value is None. Must be
an instance of enum_type, or None. If None, you are disallowing a
default and requiring that value be non-None.
:return: An instance of enum_type
:raises TypeError: If value was neither an instance of enum_type, None, nor
a string
:raises KeyError: If value was a string which couldn't be interpreted as an
enum value from enum_type
"""
assert enum_default is None or isinstance(enum_default, enum_type)
if not isinstance(value, enum_type):
if value is None and enum_default is not None:
value = enum_default
elif isinstance(value, six.string_types):
value = enum_type[value.upper()]
else:
raise TypeError("Not a valid {}: {}".format(
enum_type.__name__, value,
))
return value
class STIXdatetime(dt.datetime):
"""
Bundle a datetime with some format-related metadata, so that JSON
serialization has the info it needs to produce compliant timestamps.
"""
def __new__(cls, *args, **kwargs):
precision = _to_enum(
kwargs.pop("precision", Precision.ANY),
Precision,
)
precision_constraint = _to_enum(
kwargs.pop("precision_constraint", PrecisionConstraint.EXACT),
PrecisionConstraint,
)
if isinstance(args[0], dt.datetime): # Allow passing in a datetime object
dttm = args[0]
args = (
dttm.year, dttm.month, dttm.day, dttm.hour, dttm.minute,
dttm.second, dttm.microsecond, dttm.tzinfo,
)
# self will be an instance of STIXdatetime, not dt.datetime
self = dt.datetime.__new__(cls, *args, **kwargs)
self.precision = precision
self.precision_constraint = precision_constraint
return self
def __repr__(self):
return "'%s'" % format_datetime(self)
def deduplicate(stix_obj_list):
"""Deduplicate a list of STIX objects to a unique set.
Reduces a set of STIX objects to unique set by looking
at 'id' and 'modified' fields - as a unique object version
is determined by the combination of those fields
Note: Be aware, as can be seen in the implementation
of deduplicate(),that if the "stix_obj_list" argument has
multiple STIX objects of the same version, the last object
version found in the list will be the one that is returned.
Args:
stix_obj_list (list): list of STIX objects (dicts)
Returns:
A list with a unique set of the passed list of STIX objects.
"""
unique_objs = {}
for obj in stix_obj_list:
try:
unique_objs[(obj['id'], obj['modified'])] = obj
except KeyError:
# Handle objects with no `modified` property, e.g. marking-definition
unique_objs[(obj['id'], obj['created'])] = obj
return list(unique_objs.values())
def get_timestamp():
"""Return a STIX timestamp of the current date and time."""
return STIXdatetime.now(tz=pytz.UTC)
def format_datetime(dttm):
"""Convert a datetime object into a valid STIX timestamp string.
1. Convert to timezone-aware
2. Convert to UTC
3. Format in ISO format
4. Ensure correct precision
a. Add subsecond value if warranted, according to precision settings
5. Add "Z"
"""
if dttm.tzinfo is None or dttm.tzinfo.utcoffset(dttm) is None:
# dttm is timezone-naive; assume UTC
zoned = pytz.utc.localize(dttm)
else:
zoned = dttm.astimezone(pytz.utc)
ts = zoned.strftime('%Y-%m-%dT%H:%M:%S')
precision = getattr(dttm, 'precision', Precision.ANY)
precision_constraint = getattr(
dttm, 'precision_constraint', PrecisionConstraint.EXACT,
)
frac_seconds_str = ""
if precision == Precision.ANY:
# No need to truncate; ignore constraint
if zoned.microsecond:
frac_seconds_str = "{:06d}".format(zoned.microsecond).rstrip("0")
elif precision == Precision.SECOND:
if precision_constraint == PrecisionConstraint.MIN:
# second precision, or better. Winds up being the same as ANY:
# just use all our digits
if zoned.microsecond:
frac_seconds_str = "{:06d}".format(zoned.microsecond)\
.rstrip("0")
# exact: ignore microseconds entirely
else:
# precision == millisecond
if precision_constraint == PrecisionConstraint.EXACT:
# can't rstrip() here or we may lose precision
frac_seconds_str = "{:06d}".format(zoned.microsecond)[:3]
else:
# millisecond precision, or better. So we can rstrip() zeros, but
# only to a length of at least 3 digits (ljust() adds zeros back,
# if it stripped too far.)
frac_seconds_str = "{:06d}"\
.format(zoned.microsecond)\
.rstrip("0")\
.ljust(3, "0")
ts = "{}{}{}Z".format(
ts,
"." if frac_seconds_str else "",
frac_seconds_str,
)
| return ts
def parse_into_datetime(
value, precision=Precision.ANY,
precision_constraint=PrecisionConstraint.EXACT,
):
"""
Parse a value into a valid STIX timestamp object. Also, optionally adjust
precision of fractional seconds. This allows alignment with JSON
serialization requirements, and helps ensure we're not using extra
precision which would be lost upon JSON serialization. The precision
info will be embedded in the returned object, so that JSON serialization
will format it correctly.
:param value: A datetime.datetime or datetime.date instance, or a string
:param precision: A precision value: either an instance of the Precision
enum, or a string naming one of the enum values (case-insensitive)
:param precision_constraint: A precision constraint value: either an
instance of the PrecisionConstraint enum, or a string naming one of
the enum values (case-insensitive)
:return: A STIXdatetime instance, which is a datetime but also carries the
precision info necessary to properly JSON-serialize it.
"""
precision = _to_enum(precision, Precision)
precision_constraint = _to_enum(precision_constraint, PrecisionConstraint)
if isinstance(value, dt.date):
if hasattr(value, 'hour'):
ts = value
else:
# Add a time component
ts = dt.datetime.combine(value, dt.time(0, 0, tzinfo=pytz.utc))
else:
# value isn't a date or datetime object so assume it's a string
fmt = _TIMESTAMP_FORMAT_FRAC if "." in value else _TIMESTAMP_FORMAT
try:
parsed = dt.datetime.strptime(value, fmt)
except (TypeError, ValueError):
# Unknown format
raise ValueError(
"must be a datetime object, date object, or "
"timestamp string in a recognizable format.",
)
if parsed.tzinfo:
ts = parsed.astimezone(pytz.utc)
else:
# Doesn't have timezone info in the string; assume UTC
ts = pytz.utc.localize(parsed)
# Ensure correct precision
if precision == Precision.SECOND:
if precision_constraint == PrecisionConstraint.EXACT:
ts = ts.replace(microsecond=0)
# else, no need to modify fractional seconds
elif precision == Precision.MILLISECOND:
if precision_constraint == PrecisionConstraint.EXACT:
us = (ts.microsecond // 1000) * 1000
ts = ts.replace(microsecond=us)
# else: at least millisecond precision: the constraint will affect JSON
# formatting, but there's nothing we need to do here.
# else, precision == Precision.ANY: nothing for us to do.
return STIXdatetime(
ts, precision=precision, precision_constraint=precision_constraint,
)
def _get_dict(data):
"""Return data as a dictionary.
Input can be a dictionary, string, or file-like object.
"""
if type(data) is dict:
return data
else:
try:
return json.loads(data)
except TypeError:
pass
try:
return json.load(data)
except AttributeError:
pass
try:
return dict(data)
except (ValueError, TypeError):
raise ValueError("Cannot convert '%s' to dictionary." % str(data))
def get_class_hierarchy_names(obj):
"""Given an object, return the names of the class hierarchy."""
names = []
for cls in obj.__class__.__mro__:
names.append(cls.__name__)
return names
def get_type_from_id(stix_id):
return stix_id.split('--', 1)[0]
def is_marking(obj_or_id):
"""Determines whether the given object or object ID is/is for a marking
definition.
:param obj_or_id: A STIX object or object ID as a string.
:return: True if a marking definition, False otherwise.
"""
if isinstance(obj_or_id, (stix2.base._STIXBase, dict)):
result = obj_or_id["type"] == "marking-definition"
else:
# it's a string ID
result = obj_or_id.startswith("marking-definition--")
return result | |
fornecedor-detalhes.component.ts | import { Component, Inject, OnInit } from '@angular/core';
import { MatDialogRef, MAT_DIALOG_DATA } from '@angular/material/dialog';
import { FornecedoresListagemComponent } from '../../containers/fornecedores-listagem/fornecedores-listagem.component';
import { Fornecedores } from '../../models/fornecedores.model';
import { FornecedoresService } from '../../services/fornecedores.services';
@Component({
selector: 'app-fornecedor-detalhes',
templateUrl: './fornecedor-detalhes.component.html',
styleUrls: ['./fornecedor-detalhes.component.css']
})
export class | implements OnInit {
public id_fornecedor: number;
public nome: string;
public cnpj: string;
public endereco: string;
public numero: string;
public bairro: string;
public cep: string;
public cidade: string;
public uf: string;
public telefone: string;
public celular: string;
public email: string;
public status: string;
public fornecedor: Fornecedores;
// Receives the id of the supplier to display through the Material dialog
// data payload; the service and dialog ref are injected for later use.
constructor(private fornecedoresService: FornecedoresService,
  private dialogRef: MatDialogRef<FornecedoresListagemComponent>, @Inject(MAT_DIALOG_DATA) data) {
  this.id_fornecedor = data.id_fornecedor;
}
// Load the supplier's details as soon as the dialog component initializes.
ngOnInit(): void {
  this.getFornecedor();
}
// Fetches the supplier matching this.id_fornecedor and copies its fields
// into the component's bound properties.
// NOTE(review): this pulls the whole supplier list just to pick one item;
// a dedicated get-by-id endpoint would be cheaper if the API offers one.
private getFornecedor() {
  this.fornecedoresService.getFornecedores().subscribe(fornecedor => {
    this.fornecedor = fornecedor.find(a => a.id_fornecedor == this.id_fornecedor);
    // BUG FIX: `find` returns undefined when no supplier matches the id;
    // without this guard the property copies below throw at runtime.
    if (!this.fornecedor) {
      return;
    }
    this.nome = this.fornecedor.nome;
    this.cnpj = this.fornecedor.cnpj;
    this.endereco = this.fornecedor.endereco;
    this.numero = this.fornecedor.numero;
    this.bairro = this.fornecedor.bairro;
    this.cep = this.fornecedor.cep;
    this.cidade = this.fornecedor.cidade;
    this.uf = this.fornecedor.uf;
    this.telefone = this.fornecedor.telefone;
    this.celular = this.fornecedor.celular;
    this.email = this.fornecedor.email;
    this.status = this.fornecedor.status;
  })
}
}
| FornecedorDetalhesComponent |
z.rs | use super::{Tetromino, TetrominoCommon, SQUARE_COUNT};
use crate::color::Color;
use crate::playground::Playground;
use crate::playground::COLUMN_COUNT;
#[allow(unused_imports)]
use rand::{thread_rng, Error, Rng, RngCore};
const COLOR: Color = Color::Red;
/// The Z tetromino: four playground cell indices forming two horizontal
/// pairs, the lower pair shifted one column right of the upper pair.
pub struct Z {
    squares: [usize; SQUARE_COUNT],
}
impl Z {
    /// Builds a Z at the top of the playground at a random horizontal
    /// position. `index` is drawn from [2, COLUMN_COUNT - 2) so the
    /// piece's footprint stays inside the grid; cells 0-1 are the top
    /// pair, cells 2-3 the bottom pair one column to the right.
    fn create(rng: &mut Box<dyn RngCore>) -> Self {
        // NOTE: two-argument gen_range(low, high) is the rand 0.5/0.6 API.
        let index = rng.gen_range(2, COLUMN_COUNT - 2);
        let mut squares = [0; SQUARE_COUNT];
        squares[0] = index;
        squares[1] = index + 1;
        squares[2] = index + COLUMN_COUNT + 1;
        squares[3] = index + COLUMN_COUNT + 2;
        Z { squares }
    }
}
/// Accessors used by the shared `TetrominoCommon` movement logic.
impl TetrominoCommon for Z {
    /// Stores a playground cell index in slot `index` of the piece.
    fn set_square(&mut self, index: usize, value: usize) {
        self.squares[index] = value;
    }

    /// Reads the playground cell index stored in slot `index`.
    fn get_square(&self, index: usize) -> usize {
        self.squares[index]
    }

    /// Z pieces are always drawn in red.
    fn get_color(&self) -> Color {
        COLOR
    }
}
/// Public `Tetromino` interface; every movement method simply forwards to
/// the shared `TetrominoCommon` implementation with the same name.
impl Tetromino for Z {
    /// Creates a Z using the thread-local random number generator.
    fn new() -> Self {
        let mut rng = Box::new(thread_rng()) as Box<dyn RngCore>;
        Z::create(&mut rng)
    }

    fn insert_into_playground(&self, playground: &mut Playground) -> bool {
        <Z as TetrominoCommon>::insert_into_playground(self, playground)
    }

    fn go_down(&mut self, playground: &mut Playground) -> bool {
        <Z as TetrominoCommon>::go_down(self, playground)
    }

    fn go_right(&mut self, playground: &mut Playground) -> bool {
        <Z as TetrominoCommon>::go_right(self, playground)
    }

    fn go_left(&mut self, playground: &mut Playground) -> bool {
        <Z as TetrominoCommon>::go_left(self, playground)
    }

    fn go_bottom(&mut self, playground: &mut Playground) -> bool {
        <Z as TetrominoCommon>::go_bottom(self, playground)
    }
}
/// `Default` delegates to `Tetromino::new`, i.e. a randomly placed piece.
impl Default for Z {
    fn default() -> Self {
        Self::new()
    }
}
#[cfg(test)]
mod tests {
use super::*;
#[cfg(test)]
struct FakeGenerator {
next_number: u32, | #[cfg(test)]
// Deterministic RngCore stub: every draw yields the preset `next_number`,
// making piece placement in the tests predictable.
impl RngCore for FakeGenerator {
    fn next_u32(&mut self) -> u32 {
        self.next_number
    }

    fn next_u64(&mut self) -> u64 {
        self.next_number as u64
    }

    // Byte-filling is unused by the tests; leave the buffer untouched.
    fn fill_bytes(&mut self, _dest: &mut [u8]) {}

    fn try_fill_bytes(&mut self, _dest: &mut [u8]) -> Result<(), Error> {
        Result::Ok(())
    }
}
#[cfg(test)]
/// Boxes a `FakeGenerator` that always yields `next_number`.
fn get_fake_rand(next_number: u32) -> Box<dyn RngCore> {
    Box::new(FakeGenerator { next_number }) as Box<dyn RngCore>
}
#[test]
// A freshly created Z (anchored at column 2) inserts into an empty
// playground and paints its four cells red.
fn insert_into_playground_true() {
    // given
    let mut fake_random = get_fake_rand(2);
    let mut playground = Playground::new();
    let tetromino = Z::create(&mut fake_random);

    // when
    let is_inserted =
        <Z as TetrominoCommon>::insert_into_playground(&tetromino, &mut playground);

    // then
    assert_eq!(is_inserted, true);
    // Cells 2,3 on the first row and 13,14 on the second (COLUMN_COUNT
    // offset) form the Z shape.
    let mut expected_squares = [0; 4];
    expected_squares[0] = 2;
    expected_squares[1] = 3;
    expected_squares[2] = 13;
    expected_squares[3] = 14;
    assert_eq!(tetromino.squares, expected_squares);
    assert_eq!(playground.get_squares()[2], Color::Red);
    assert_eq!(playground.get_squares()[3], Color::Red);
    assert_eq!(playground.get_squares()[13], Color::Red);
    assert_eq!(playground.get_squares()[14], Color::Red);
}
#[test]
// Moving an inserted Z down one row shifts every cell index by
// COLUMN_COUNT, clears the vacated cells and paints the new ones.
fn go_down_empty_playground() {
    // given
    let mut fake_random = get_fake_rand(2);
    let mut playground = Playground::new();
    let mut tetromino = Z::create(&mut fake_random);
    <Z as TetrominoCommon>::insert_into_playground(&tetromino, &mut playground);

    // when
    let went_down = <Z as Tetromino>::go_down(&mut tetromino, &mut playground);

    // then
    assert_eq!(went_down, true);
    let mut expected_squares = [0; 4];
    expected_squares[0] = 12;
    expected_squares[1] = 13;
    expected_squares[2] = 23;
    expected_squares[3] = 24;
    assert_eq!(tetromino.squares, expected_squares);
    // Old top-row cells are cleared; the shifted cells are now red.
    assert_eq!(playground.get_squares()[2], Color::None);
    assert_eq!(playground.get_squares()[3], Color::None);
    assert_eq!(playground.get_squares()[12], Color::Red);
    assert_eq!(playground.get_squares()[13], Color::Red);
    assert_eq!(playground.get_squares()[23], Color::Red);
    assert_eq!(playground.get_squares()[24], Color::Red);
}
} | } |
Plotter.py | import rospy
import PointCloud
reload(PointCloud)
import CoordinateFrames
reload(CoordinateFrames)
import Lines
reload(Lines)
import Image as ImagePy
reload(ImagePy)
import Text
reload(Text)
from PointCloud import PointCloudMarker
from PlotObject import PlotObject
from CoordinateFrames import CoordinateFramesMarker
from Image import ImageMarker
from sensor_msgs.msg import PointCloud2, Image
from visualization_msgs.msg import Marker, MarkerArray
publishedMessages = [] # to be able to delete them later with clf
class Plotter(object):
def __init__(self, initRosNode=True, rosNodeName=None, visFrame=None):
    """Create a plotter, optionally initializing a rospy node.

    :param initRosNode: when True, initialize a ROS node for publishing.
    :param rosNodeName: node name; defaults to 'rviz_pyplot'.
    :param visFrame: TF frame id assigned to messages that lack one;
        defaults to "/rviz_pyplot".
    """
    if initRosNode:
        if rosNodeName is None:
            rosNodeName = 'rviz_pyplot'#_%s'.format(uuid.uuid1().get_hex())
        # NOTE(review): rospy.init_node's second positional argument is
        # argv; passing ['Plotter.py'] here looks unintentional -- confirm.
        rospy.init_node(rosNodeName,['Plotter.py'], disable_signals=True)
    if visFrame is None:
        visFrame = "/rviz_pyplot"
    self._visFrame = visFrame
    # Publisher cache: message type -> {topic name -> rospy.Publisher}.
    self._publishers = {}
    self._publishers[PointCloud2] = {}
    self._publishers[MarkerArray] = {}
    self._publishers[Image] = {}
    # Default topic per message type, namespaced under the node name.
    self._defaultTopics = {}
    self._defaultTopics[PointCloud2] = "{0}/points".format(rospy.get_name())
    self._defaultTopics[MarkerArray] = "{0}/marker_array".format(rospy.get_name())
    self._defaultTopics[Image] = "{0}/images".format(rospy.get_name())
    # \todo publish transforms in a thread.
def __del__(self):
    # Placeholder destructor; ROS resources are not explicitly released yet.
    # \todo clean up ROS
    pass
def clf(self):
    """Clear the figure: re-publish every remembered marker message with
    action DELETE so RViz removes it, then forget the message list.

    PointCloud2 and Image messages carry no delete action, so they are
    skipped.
    """
    global publishedMessages
    for topic, msg in publishedMessages:
        if type(msg) == Marker:
            # BUG FIX: this branch previously fell through to the
            # `else: continue` below (the MarkerArray check was `if`,
            # not `elif`), so a bare Marker's DELETE was never
            # published. Also, self._publishers has no entry keyed by
            # Marker, so wrap the marker in a MarkerArray and use the
            # MarkerArray publisher, matching what plot() does.
            msg.action = Marker.DELETE
            wrapper = MarkerArray()
            wrapper.markers.append(msg)
            pub = self.getPublisher(MarkerArray, topic)
            pub.publish(wrapper)
        elif type(msg) == MarkerArray:
            pub = self.getPublisher(MarkerArray, topic)
            for m in msg.markers:
                m.action = Marker.DELETE
            pub.publish(msg)
        else:
            # Non-marker message types cannot be deleted this way.
            continue
    publishedMessages = []
def getDefaultPointCloudTopic(self):
    """Return the default topic name used for PointCloud2 messages.

    BUG FIX: this previously returned self._defaultPointCloudTopic, an
    attribute that is never assigned anywhere (the constructor stores the
    defaults in the _defaultTopics dict keyed by message type), so every
    call raised AttributeError.
    """
    return self._defaultTopics[PointCloud2]
| def getPublisher(self, messageType, topic=None, latch=True):
publisherList = self._publishers[messageType]
if topic is None:
topic = self._defaultTopics[messageType]
if topic in publisherList:
pub = publisherList[topic]
else:
# Initialize a new publisher
pub = rospy.Publisher(topic, messageType, latch=latch)
# Save the publisher for later
publisherList[topic] = pub
return pub
def activeTopics(self):
    """Return (point-cloud topic names, marker-array topic names).

    BUG FIX: this previously read self._pointCloudPubs and
    self._markerArrayPubs, attributes that do not exist anywhere in the
    class (publishers live in the _publishers dict keyed by message
    type), so every call raised AttributeError.
    """
    return (self._publishers[PointCloud2].keys(),
            self._publishers[MarkerArray].keys())
def printActiveTopics( self ):
print "Point cloud topics:"
for key in self._pointCloudPubs.keys():
print "\t{0}".format(key)
print "Marker array topics:"
for key in self._markerArrayPubs.keys():
print "\t{0}".format(key)
def plot( self, plotItems, stamp=None ):
    """Publish one plot item or a list of plot items to RViz.

    Each item contributes (topic, message) pairs. PointCloud2 and Image
    messages are published immediately; Marker messages are batched into
    one MarkerArray per topic and published at the end. Everything
    published is recorded in the global publishedMessages list so that
    clf() can delete it later.

    :param plotItems: a single plot object or a list of them; each must
        provide appendMessages(stamp, messages).
    :param stamp: ROS time stamped onto every message; defaults to now.
    """
    if stamp is None:
        stamp = rospy.Time.now()
    # Accumulate a list of point clouds and markers to publish
    messages = []
    if type(plotItems) == list:
        for item in plotItems:
            item.appendMessages(stamp, messages)
    else:
        # Assume this is a single plotItem
        plotItems.appendMessages(stamp, messages)
    global publishedMessages
    # topic name -> MarkerArray being accumulated for that topic.
    topics = {}
    for topic, msg in messages:
        if type(msg) == PointCloud2:
            pub = self.getPublisher(PointCloud2, topic)
            # Always override the stamp. This is a design choice
            # that may be revisited
            msg.header.stamp = stamp
            # NOTE(review): ROS message frame_id defaults to "" rather
            # than None, so this check may never trigger -- confirm.
            if msg.header.frame_id is None:
                msg.header.frame_id = self._visFrame
            pub.publish( msg )
            publishedMessages.append( (topic,msg) )
        elif type(msg) == Marker:
            msg.header.stamp = stamp
            if msg.header.frame_id is None:
                msg.header.frame_id = self._visFrame
            # Batch markers per topic; published together below.
            if topic in topics:
                topics[topic].markers.append(msg)
            else:
                ma = MarkerArray()
                ma.markers.append(msg)
                topics[topic] = ma
        elif type(msg) == Image:
            pub = self.getPublisher(Image, topic)
            # Always override the stamp. This is a design choice
            # that may be revisited
            msg.header.stamp = stamp
            if msg.header.frame_id is None:
                msg.header.frame_id = self._visFrame
            pub.publish( msg )
            publishedMessages.append( (topic,msg) )
        else:
            raise RuntimeError("Unknown message type {0}\n{1}".format(type(msg), msg))
    # Flush the per-topic marker batches.
    for topic, ma in topics.iteritems():
        pub = self.getPublisher(MarkerArray, topic)
        pub.publish(ma)
        publishedMessages.append( (topic,ma) )
def plotImage(self, I, frameId=None, topic=None):
    """Wrap image data I in an ImageMarker and publish it via plot()."""
    img = ImageMarker(frameId=frameId, topic=topic)
    img.addImage(I)
    self.plot(img)
def plotText(self, text, position, scale, frameId=None, topic=None):
textMarker = Text.TextMarker(frameId=frameId, topic=topic, scale=scale)
textMarker.setText(text, position)
self.plot(textMarker) | def getDefaultMarkerArrayTopic(self):
return self._defaultMarkerArrayTopic
|
macros.rs | #![macro_export]
#[cfg(feature = "debug_output")]
macro_rules! debug_print
{
($( $args:expr ),*) => { println!( $( $args ),* ); }
}
#[cfg(not(feature = "debug_output"))]
macro_rules! debug_print {
($( $args:expr ),*) => {};
}
| macro_rules! info_print
{
($( $args:expr ),*) => { println!( $( $args ),* ); }
}
// No-op variant of info_print compiled when the `info_output` feature
// is disabled.
#[cfg(not(feature = "info_output"))]
macro_rules! info_print {
    ($( $args:expr ),*) => {};
}
#[cfg(feature = "thread_output")]
macro_rules! thread_print
{
($( $args:expr ),*) => { println!( $( $args ),* ); }
}
#[cfg(not(feature = "thread_output"))]
macro_rules! thread_print {
($( $args:expr ),*) => {};
} | #[cfg(feature = "info_output")] |
espoo.py | # -*- coding: utf-8 -*-
import re
import logging
import time
from datetime import datetime, timedelta
import requests
import bleach
import dateutil.parser
import pytz
import requests_cache
from django.utils.html import strip_tags
from events.models import (
DataSource,
Event,
Keyword,
Place
)
from django_orghierarchy.models import Organization
from pytz import timezone
from .base import Importer, recur_dict, register_importer
from .yso import KEYWORDS_TO_ADD_TO_AUDIENCE
from .sync import ModelSyncher
# Maximum number of attempts to fetch the event from the API before giving up
MAX_RETRY = 5
YSO_BASE_URL = 'http://www.yso.fi/onto/yso/'
YSO_KEYWORD_MAPS = {
u'koululaiset ja opiskelijat': (u'p16485', u'p16486'),
u'yhdistykset ja seurat': u'p1393', # both words seems to mean associations
u'näyttelyt ja tapahtumat': (u'p5121', u'p2108'),
u'nuoriso': u'p11617',
u'koulutus, kurssit ja luennot': (u'p84', u'p9270', u'p15875'),
u'stand up ja esittävä taide': (u'p9244', u'p2850'),
u'nuorisotyö': u'p1925',
u'ohjaus, neuvonta ja tuki': (u'p178', u'p23'),
u'terveys ja hyvinvointi': u'p22036',
u'ilmastonmuutos': u'p5729',
u'leirit, matkat ja retket': (u'p143', u'p366', u'p25261'),
u'kerhot ja kurssit': (u'p7642', u'p9270'),
u'internet': u'p20405',
u'tapahtumat': u'p2108',
u'asukastoiminta': u'p2250',
u'rakentaminen': u'p3673',
u'kaavoitus': u'p8268',
u'laitteet ja työtilat': (u'p2442', u'p546'), # -> Laitteet, työtilat
u'museot': u'p4934',
u'museot ja kuvataide': (u'p4934', u'p2739'), # -> museot, kuvataide
u'näyttelyt ja galleriat': (u'p5121', u'p6044'), # -> Näyttelyt, galleriat
u'musiikki': u'p1808',
u'teatteri': u'p2625',
u'kevyt liikenne': u'p4288',
u'liikenne': u'p3466',
u'tiet ja kadut': (u'p1210', u'p8317'), # -> Tiet, kadut
u'liikuntapalvelut': u'p9824',
u'liikuntapaikat': u'p5871',
u'luonto- ja ulkoilureitit': (u'p13084', u'p5350'), # -> Luonto, ulkoilureitit
u'uimahallit': u'p9415',
u'ulkoilualueet': u'p4858',
u'urheilu- ja liikuntajärjestöt': (u'p965', u'p25543'), # -> Urheilu, liikuntajärjestöt
u'virkistysalueet': u'p4058',
u'bändit': u'p5072',
u'nuorisotilat': u'p17790',
u'aikuiskoulutus': u'p300',
u'korkeakouluopetus': u'p1246',
u'perusopetus': u'p19327',
u'päivähoito (lapsille)': u'p3523', # -> Päivähoito
u'lapsille': u'p4354', # lapset (ikäryhmät)
u'elokuva': u'p16327', # elokuva (taiteet)
u'elokuvat': u'p16327', # elokuva (taiteet)
u'musiikki ja konsertit': (u'p1808', u'p11185'), # Musiikki, konsertit
u'liikunta, ulkoilu ja urheilu': (u'p916', u'p2771', u'p965'),
u'liikuntalajit': u'p916',
u'ohjattu liikunta': u'p916',
u'harrastus- ja kerhotoiminta': (u'p2901', u'p7642', u'p8090'), # Harrastus, Kerho, toiminta
u'perheet': u'p4363', # perheet (ryhmät)
u'koko perheelle': u'p4363',
u'yrittäjät ja yritykset': (u'p1178', u'p3128'),
u'yrittäjät': u'p1178',
u'lapset': u'p4354',
u'kirjastot': u'p2787',
u'opiskelijat': u'p16486',
u'konsertit ja klubit': (u'p11185', u'p20421'), # -> konsertit, musiikkiklubit
u'kurssit': u'p9270',
u'venäjä': u'p7643', # -> venäjän kieli
u'seniorit': u'p2434', # -> vanhukset
u'senioreille': u'p2434', # -> vanhukset
u'senioripalvelut': u'p2434',
u'näyttelyt': u'p5121',
u'kirjallisuus': u'p8113',
u'kielikahvilat ja keskusteluryhmät': u'p18105', # -> keskusteluryhmät
u'maahanmuuttajat': u'p6165',
u'opastukset ja kurssit': (u'p2149', u'p9270'), # -> opastus, kurssit
u'nuoret': u'p11617',
u'pelitapahtumat': u'p6062', # -> pelit
u'satutunnit': u'p14710',
u'koululaiset': u'p16485',
u'lasten ja nuorten tapahtumat': (u'p4354', u'p11617'), # -> lapset, nuoret
u'lapset ja perheet': (u'p4354', u'p4363'), # -> lapset, perheet
u'lukupiirit': u'p11406', # -> lukeminen
u'asuminen ja ympäristö': u'p1797', # -> asuminen
u'ympäristö ja luonto': u'p13084', # -> luonto
u'tanssi ja voimistelu': (u'p1278', u'p963'), # -> tanssi, voimistelu
u'tanssi ja sirkus': (u'p1278', u'p5007'), # -> tanssi, sirkus,
u'sosiaali- ja terveyspalvelut': (u'p1307', u'p3307'), # -> sosiaalipalvelut, terveyspalvelut
u'hyvinvointi ja terveys': (u'p22036', u'p2762'), # -> hyvinvointi, terveys
u'asemakaava': u'p8268',
u'asemakaavat': u'p8268',
u'asemakaavoituskohteet': u'p8268',
}
# retain the above for simplicity, even if espoo importer internally requires full keyword ids
KEYWORDS_TO_ADD_TO_AUDIENCE = ['yso:{}'.format(i) for i in KEYWORDS_TO_ADD_TO_AUDIENCE]
# certain classifications are too general, or locations that do not belong to keywords
CLASSIFICATIONS_TO_DISREGARD = [
'tapahtumat',
'kulttuuri',
'kulttuuri ja liikunta',
'kulttuuri ja liikunta ',
'kaikki tapahtumat',
'muut tapahtumat',
'sellosali',
'espoon kulttuurikeskus',
'espoon kaupunginmuseo',
'kamu',
'näyttelykeskus weegee',
'karatalo',
'ohjelmisto',
'kulttuurikohteet ja -toimijat',
'espoo.fi',
'kulttuuriespoo.fi',
'kulttuurikeskukset ja -talot'
]
LOCATIONS = {
# Place name in Finnish -> ((place node ids in event feed), tprek id)
u'matinkylän asukaspuisto': ((15728,), 20267),
u'soukan asukaspuisto': ((15740,), 20355),
u'espoon kulttuurikeskus': ((15325,), 20402),
u'näyttelykeskus weegee': ((15349,), 20404),
u'KAMU': ((28944,), 20405),
u'Karatalo': ((15357,), 21432),
u'Nuuksio': ((15041,), 28401),
}
ESPOO_BASE_URL = 'http://www.espoo.fi'
ESPOO_API_URL = (
ESPOO_BASE_URL + '/api/opennc/v1/ContentLanguages({lang_code})'
'/Contents?$filter=TemplateId eq 58&$expand=ExtendedProperties,LanguageVersions'
'&$orderby=EventEndDate desc&$format=json'
)
ESPOO_LANGUAGES = {
'fi': 1,
'sv': 3,
'en': 2,
}
LOCAL_TZ = timezone('Europe/Helsinki')
def get_lang(lang_id):
    """Reverse-map an Espoo language id to its ISO code, or None if unknown."""
    return next(
        (code for code, lid in ESPOO_LANGUAGES.items() if lid == lang_id),
        None,
    )
def clean_text(text, strip_newlines=False):
    """Normalize whitespace in scraped text.

    Replaces non-breaking spaces, drops stray unit-separator control
    characters, optionally flattens CR/LF, and collapses every run of
    whitespace to a single space.

    :param text: the string to clean
    :param strip_newlines: when True, also replace newlines with spaces
    :return: the cleaned, stripped string
    """
    text = text.replace('\xa0', ' ').replace('\x1f', '')
    if strip_newlines:
        text = text.replace('\r', '').replace('\n', ' ')
    # remove consecutive whitespaces
    # BUG FIX: the original passed re.U as re.sub's fourth positional
    # argument, which is `count` (re.U == 32), silently capping the
    # substitution at the first 32 whitespace runs. Pass it as flags.
    return re.sub(r'\s\s+', ' ', text, flags=re.U).strip()
def mark_deleted(obj):
    """Soft-delete a model instance.

    Sets the `deleted` flag and persists only that field. Returns True
    when the object was newly marked, False when it was already deleted.
    """
    already_deleted = obj.deleted
    if not already_deleted:
        obj.deleted = True
        obj.save(update_fields=['deleted'])
    return not already_deleted
def clean_street_address(address):
    """Parse a free-form Espoo street address into components.

    :param address: e.g. "Kamreerintie 3, 02770 Espoo"
    :return: dict with 'street_address', 'postal_code' and
        'address_locality' keys (empty strings when absent), or {} when
        the address cannot be parsed at all.
    """
    logger = logging.getLogger(__name__)
    LATIN1_CHARSET = u'a-zàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ'

    address = address.strip()
    # street name + number (and optional staircase letters), an optional
    # Espoo-range postal code (02xxx), and an optional city name;
    # matched case-insensitively.
    pattern = re.compile(r'([%s\ -]*[0-9-\ ]*\ ?[a-z]{0,2}),?\ *(0?2[0-9]{3})?\ *(espoo|esbo)?' % LATIN1_CHARSET, re.I)
    match = pattern.match(address)
    if not match:
        logger.warning('Address not matching %s' % address)
        return {}

    groups = match.groups()
    street_address = groups[0]
    postal_code = None
    city = None
    # NOTE(review): the pattern always yields exactly three groups, so the
    # len(groups) == 2 branch appears unreachable -- confirm intent.
    if len(groups) == 2:
        city = groups[1]
    elif len(groups) == 3:
        postal_code = groups[1]
        city = groups[2]

    return {
        'street_address': clean_text(street_address) or '',
        'postal_code': postal_code or '',
        'address_locality': city or '',
    }
def clean_url(url):
    """
    Extract the url from the html tag if any or return the cleaned text.
    """
    match = re.search(r'href=["\'](.*?)["\']', url)
    if match is not None:
        # An anchor tag was found; return its first href target.
        return match.group(1)
    return clean_text(url)
class APIBrokenError(Exception):
    """Raised when the Espoo API keeps failing after MAX_RETRY attempts."""
    pass
@register_importer
class EspooImporter(Importer):
name = "espoo"
supported_languages = ['fi', 'sv', 'en']
keyword_cache = {}
location_cache = {}
def _build_cache_places(self):
    """Cache the tprek origin_id -> internal Place id mapping for every
    location listed in LOCATIONS."""
    loc_id_list = [l[1] for l in LOCATIONS.values()]
    place_list = Place.objects.filter(
        data_source=self.tprek_data_source
    ).filter(origin_id__in=loc_id_list)
    self.tprek_by_id = {p.origin_id: p.id for p in place_list}
def _cache_yso_keywords(self):
    """Prefetch every YSO keyword referenced by YSO_KEYWORD_MAPS into
    self.keyword_by_id; left empty when no 'yso' data source exists."""
    try:
        yso_data_source = DataSource.objects.get(id='yso')
    except DataSource.DoesNotExist:
        self.keyword_by_id = {}
        return

    # Collect the full 'yso:<id>' keys; map values may be a single id
    # or a tuple of ids.
    cat_id_set = set()
    for yso_val in YSO_KEYWORD_MAPS.values():
        if isinstance(yso_val, tuple):
            for t_v in yso_val:
                cat_id_set.add('yso:' + t_v)
        else:
            cat_id_set.add('yso:' + yso_val)
    keyword_list = Keyword.objects.filter(data_source=yso_data_source).filter(id__in=cat_id_set)
    self.keyword_by_id = {p.id: p for p in keyword_list}
def setup(self):
    """Create or fetch the data sources, publisher organization and
    lookup caches needed before importing, and enable HTTP caching when
    the importer was started with the 'cached' option."""
    self.tprek_data_source = DataSource.objects.get(id='tprek')

    ds_args = dict(id=self.name)
    ds_defaults = dict(name='City of Espoo')
    self.data_source, _ = DataSource.objects.get_or_create(defaults=ds_defaults, **ds_args)

    org_args = dict(origin_id='kaupunki', data_source=self.data_source)
    org_defaults = dict(name='Espoon kaupunki')
    self.organization, _ = Organization.objects.get_or_create(defaults=org_defaults, **org_args)
    self._build_cache_places()
    self._cache_yso_keywords()

    if self.options['cached']:
        requests_cache.install_cache('espoo')
        self.cache = requests_cache.get_cache()
    else:
        self.cache = None
@staticmethod
def _get_extended_properties(event_el):
    """Flatten the event's ExtendedProperties into a dict, copying every
    non-empty Text/Number/Date value (a later non-empty type overwrites
    an earlier one for the same property name)."""
    ext_props = recur_dict()
    for prop in event_el['ExtendedProperties']:
        for data_type in ('Text', 'Number', 'Date'):
            if prop[data_type]:
                ext_props[prop['Name']] = prop[data_type]
    return ext_props
def _get_next_place_id(self, origin):
    """
    Return the next sequential place id for the provided origin
    """
    # origin_id is stored as text; cast to integer so ordering is numeric.
    last_place = Place.objects.filter(data_source_id=origin).extra({
        'id_uint': 'CAST(origin_id as INTEGER)'
    }).order_by('-id_uint').first()
    _id = 1
    if last_place:
        _id = int(last_place.origin_id) + 1
    return _id
def get_or_create_place_id(self, street_address):
    """
    Return the id of the event place corresponding to the street_address.
    Create the event place if not found.

    Espoo website does not maintain a place object with a dedicated id.
    This function tries to map the address to an existing place or create
    a new one if no place is found.

    :param street_address: free-form address text from the event feed
    :return: Place id string, or None when the address cannot be parsed
    """
    address_data = clean_street_address(street_address)
    street_address = address_data.get('street_address', None)
    if not street_address:
        return

    # Fast path: this address was already resolved during this run.
    espoo_loc_id = self.location_cache.get(street_address, None)
    if espoo_loc_id:
        return espoo_loc_id

    places = Place.objects.filter(deleted=False, street_address__icontains='%s' % street_address).order_by('id')
    place = places.first()  # Choose one place arbitrarily if many.
    if len(places) > 1:
        self.logger.warning('Several tprek_id match the address "%s".' % street_address)
    if not place:
        # No existing place matches: mint a new espoo-namespaced place.
        origin_id = self._get_next_place_id("espoo")
        address_data.update({
            'publisher': self.organization,
            'origin_id': origin_id,
            'id': 'espoo:%s' % origin_id,
            'data_source': self.data_source,
        })
        place = Place(**address_data)
        place.save()
    # Cached the location to speed up
    self.location_cache.update({street_address: place.id})  # Cached the location to speed up
    return place.id
def _map_classification_keywords_from_dict(self, classification_node_name):
    """
    Try to map the classification to yso keyword using the hardcoded dictionary
    YSO_KEYWORD_MAPS.

    :param classification_node_name: The node name of the classification element
    :type classification_node_name: String
    :rtype: set of keywords

    NOTE(review): when the keyword cache is empty this returns None (bare
    return) rather than an empty set; the caller treats both as falsy, but
    the declared return type is inconsistent -- confirm before changing.
    """
    event_keywords = set()
    if not self.keyword_by_id:
        return

    def yso_to_db(v):
        return self.keyword_by_id['yso:%s' % v]

    node_name_lower = classification_node_name.lower()  # Use lower case to get ride of case sensitivity
    if node_name_lower in YSO_KEYWORD_MAPS.keys():
        yso = YSO_KEYWORD_MAPS[node_name_lower]
        if isinstance(yso, tuple):
            for t_v in yso:
                event_keywords.add(yso_to_db(t_v))
        else:
            event_keywords.add(yso_to_db(yso))
    return event_keywords
def _map_classification_keywords_from_db(self, classification_node_name, lang):
    """
    Try to map the classification to an yso keyword using the keyword name from the YSO
    stored keywords. If not available, tries to map it to an espoo keywords.

    :param classification_node_name: The node name of the classification element
    :type classification_node_name: String
    :param lang: language code ('fi', 'sv', 'en') or falsy for any language
    :rtype: set containing the keyword (empty set when nothing matches)
    """
    yso_data_source = DataSource.objects.get(id='yso')
    espoo_data_source = DataSource.objects.get(id='espoo')
    node_name = classification_node_name.strip()
    # Ordering '-data_source_id' makes 'yso' win over 'espoo' on ties.
    query = Keyword.objects.filter(data_source__in=[yso_data_source, espoo_data_source])\
        .order_by('-data_source_id')
    # NOTE(review): if lang is truthy but not one of fi/sv/en, `keyword`
    # is never assigned and the next line raises UnboundLocalError; the
    # importer's supported_languages currently prevents that -- confirm.
    if not lang:
        keyword = query.filter(name__iexact=node_name).first()
    if lang == 'fi':
        keyword = query.filter(name_fi__iexact=node_name).first()
    if lang == 'sv':
        keyword = query.filter(name_sv__iexact=node_name).first()
    if lang == 'en':
        keyword = query.filter(name_en__iexact=node_name).first()

    if not keyword:
        return set()

    self.keyword_by_id.update({keyword.id: keyword})
    return {keyword}
def _get_classification_keywords(self, classification_node_name, lang):
    """
    Try to map the classification node name to a yso keyword

    The mapping is done first using the hard-coded list YSO_KEYWORD_MAPS, then
    by querying the saved yso keywords.

    :param classification_node_name: The node name of the classification element
    :type classification_node_name: String
    :rtype: list of yso keywords
    """
    event_keywords = self._map_classification_keywords_from_dict(classification_node_name)
    if event_keywords:
        return event_keywords

    keywords = self._map_classification_keywords_from_db(classification_node_name, lang)
    # Only warn for Finnish: it is the primary language, so a miss there
    # means the mapping tables genuinely lack the classification.
    if lang == 'fi' and not keywords:
        self.logger.warning('Cannot find yso classification for keyword: %s' % classification_node_name)
        return set()
    self.keyword_by_id.update(dict({k.id: k for k in keywords}))
    return keywords
def _import_event(self, lang, event_el, events):
# Times are in Helsinki timezone
def to_utc(dt):
return LOCAL_TZ.localize(dt, is_dst=None).astimezone(pytz.utc)
def dt_parse(dt_str):
return to_utc(dateutil.parser.parse(dt_str))
start_time = dt_parse(event_el['EventStartDate'])
end_time = dt_parse(event_el['EventEndDate'])
# Import only at most one month old events
if end_time < datetime.now().replace(tzinfo=LOCAL_TZ) - timedelta(days=31):
return {'start_time': start_time, 'end_time': end_time}
eid = int(event_el['ContentId'])
event = None
if lang != 'fi':
fi_ver_ids = [int(x['ContentId']) for x in event_el['LanguageVersions'] if x['LanguageId'] == 1]
fi_event = None
for fi_id in fi_ver_ids:
if fi_id not in events:
continue
fi_event = events[fi_id]
if fi_event['start_time'] != start_time or fi_event['end_time'] != end_time:
continue
event = fi_event
break
if not event:
event = events[eid]
event['id'] = '%s:%s' % (self.data_source.id, eid)
event['origin_id'] = eid
event['data_source'] = self.data_source
event['publisher'] = self.organization
ext_props = EspooImporter._get_extended_properties(event_el)
if 'name' in ext_props:
event['name'][lang] = clean_text(ext_props['name'], True)
del ext_props['name']
if ext_props.get('EventDescription', ''):
desc = ext_props['EventDescription']
ok_tags = ('u', 'b', 'h2', 'h3', 'em', 'ul', 'li', 'strong', 'br', 'p', 'a')
desc = bleach.clean(desc, tags=ok_tags, strip=True)
event['description'][lang] = clean_text(desc)
del ext_props['EventDescription']
if ext_props.get('LiftContent', ''):
text = ext_props['LiftContent']
text = clean_text(strip_tags(text))
event['short_description'][lang] = text
del ext_props['LiftContent']
if 'offers' not in event:
event['offers'] = [recur_dict()]
offer = event['offers'][0]
has_offer = False
offer['event_id'] = event['id']
if ext_props.get('Price', ''):
text = clean_text(ext_props['Price'])
offer['price'][lang] = text | del ext_props['Price']
has_offer = True
if text.startswith('Vapaa pääsy') or text.startswith('Fritt inträde'):
offer['is_free'] = True
if ext_props.get('TicketLinks', ''):
offer['info_url'][lang] = clean_url(ext_props['TicketLinks'])
del ext_props['TicketLinks']
has_offer = True
if ext_props.get('Tickets', ''):
offer['description'][lang] = ext_props['Tickets']
del ext_props['Tickets']
has_offer = True
if not has_offer:
del event['offers']
if ext_props.get('URL', ''):
event['info_url'][lang] = clean_url(ext_props['URL'])
del ext_props['URL']
if ext_props.get('Organizer', ''):
event['provider'][lang] = clean_text(ext_props['Organizer'])
del ext_props['Organizer']
if 'LiftPicture' in ext_props:
matches = re.findall(r'src="(.*?)"', str(ext_props['LiftPicture']))
if matches:
img_url = matches[0]
event['image'] = img_url
del ext_props['LiftPicture']
event['url'][lang] = '%s/api/opennc/v1/Contents(%s)' % (
ESPOO_BASE_URL, eid
)
def set_attr(field_name, val):
if event.get(field_name, val) != val:
self.logger.warning('Event %s: %s mismatch (%s vs. %s)' %
(eid, field_name, event[field_name], val))
return
event[field_name] = val
if 'date_published' not in event:
# Publication date changed based on language version, so we make sure
# to save it only from the primary event.
event['date_published'] = dt_parse(event_el['PublicDate'])
set_attr('start_time', dt_parse(event_el['EventStartDate']))
set_attr('end_time', dt_parse(event_el['EventEndDate']))
def to_tprek_id(k):
return self.tprek_by_id[str(k).lower()]
def to_le_id(nid):
return next((to_tprek_id(v[1]) for k, v in LOCATIONS.items() if nid in v[0]), None)
event_keywords = event.get('keywords', set())
event_audience = event.get('audience', set())
for classification in event_el['Classifications']:
# Save original keyword in the raw too
# node_id = classification['NodeId']
# name = classification['NodeName']
node_type = classification['Type']
# Do not use espoo keywords at all
# # Tapahtumat exists tens of times, use pseudo id
# if name in ('Tapahtumat', 'Events', 'Evenemang'):
# node_id = 1 # pseudo id
# keyword_id = 'espoo:{}'.format(node_id)
# kwargs = {
# 'id': keyword_id,
# 'origin_id': node_id,
# 'data_source_id': 'espoo',
# }
# if name in self.keyword_cache:
# keyword_orig = self.keyword_cache[name]
# created = False
# else:
# keyword_orig, created = Keyword.objects.get_or_create(**kwargs)
# self.keyword_cache[name] = keyword_orig
#
# name_key = 'name_{}'.format(lang)
# if created:
# keyword_orig.name = name # Assume default lang Finnish
# # Set explicitly modeltranslation field
# setattr(keyword_orig, name_key, name)
# keyword_orig.save()
# else:
# current_name = getattr(keyword_orig, name_key)
# if not current_name: # is None or empty
# setattr(keyword_orig, name_key, name)
# keyword_orig.save()
#
# event_keywords.add(keyword_orig)
# Several nodes might match to location, do not classify them further
location_id = to_le_id(classification['NodeId'])
if location_id:
if 'location' not in event:
event['location']['id'] = location_id
continue
# Type 12 node refers to presence online
if node_type == 12:
continue
# disregard certain keywords that are pure spam
if classification['NodeName'].lower() in CLASSIFICATIONS_TO_DISREGARD:
continue
node_name = str(classification['NodeName']).lower()
yso_keywords = self._get_classification_keywords(node_name, lang)
event_keywords = event_keywords.union(yso_keywords)
# add audience keywords to audience too
for keyword in yso_keywords:
if keyword.id in KEYWORDS_TO_ADD_TO_AUDIENCE:
event_audience.add(keyword)
event['keywords'] = event_keywords
event['audience'] = event_audience
if ext_props.get('StreetAddress', None):
if 'location' in event:
# Already assigned a location, sets the address as location extra info
event['location']['extra_info'][lang] = ext_props.get('StreetAddress')
else:
# Get the place using the address, or create a new place
place_id = self.get_or_create_place_id(ext_props['StreetAddress'])
if place_id:
event['location']['id'] = place_id
else:
self.logger.warning('Cannot find %s' % ext_props['StreetAddress'])
del ext_props['StreetAddress']
if ext_props.get('EventLocation', ''):
event['location']['extra_info'][lang] = clean_text(ext_props['EventLocation'])
del ext_props['EventLocation']
if 'location' not in event:
self.logger.warning('Missing TPREK location map for event %s (%s)' %
(event['name'][lang], str(eid)))
del events[event['origin_id']]
return event
# Espoo custom data not needed at the moment
# for p_k, p_v in ext_props.items():
# if p_k == 'ExternalVideoLink' and p_v == 'http://':
# continue
# event['custom_data'][p_k] = p_v
return event
def _recur_fetch_paginated_url(self, url, lang, events):
    """Fetch one API page, import its events, then recurse into the next
    page until events older than 31 days are reached.

    Each page is retried up to MAX_RETRY times (invalidating the HTTP
    cache entry between attempts); APIBrokenError is raised when the API
    keeps failing.

    :param url: full API page URL
    :param lang: language code of this feed
    :param events: dict of accumulated events, keyed by origin id
    """
    for _ in range(MAX_RETRY):
        response = requests.get(url)
        if response.status_code != 200:
            self.logger.error("Espoo API reported HTTP %d" % response.status_code)
            time.sleep(5)
            if self.cache:
                self.cache.delete_url(url)
            continue
        try:
            root_doc = response.json()
        except ValueError:
            self.logger.error("Espoo API returned invalid JSON for url: %s" % url)
            if self.cache:
                self.cache.delete_url(url)
            time.sleep(5)
            continue
        break
    else:
        # for/else: every retry failed.
        self.logger.error("Espoo API is broken, giving up")
        raise APIBrokenError()

    documents = root_doc['value']
    earliest_end_time = None
    for doc in documents:
        event = self._import_event(lang, doc, events)
        if not earliest_end_time or event['end_time'] < earliest_end_time:
            earliest_end_time = event['end_time']

    # NOTE(review): attaching a pytz timezone with replace() yields the
    # zone's LMT offset rather than the proper local offset; pytz
    # recommends LOCAL_TZ.localize() -- confirm before changing.
    now = datetime.now().replace(tzinfo=LOCAL_TZ)
    # We check 31 days backwards.
    if earliest_end_time and earliest_end_time < now - timedelta(days=31):
        return

    if 'odata.nextLink' in root_doc:
        self._recur_fetch_paginated_url(
            '%s/api/opennc/v1/%s%s' % (
                ESPOO_BASE_URL,
                root_doc['odata.nextLink'],
                "&$format=json"
            ), lang, events)
def import_events(self):
    """Entry point: fetch all language feeds, import every event, save
    them in end-time order, and soft-delete events that disappeared from
    the feed via the ModelSyncher."""
    print("Importing Espoo events")
    events = recur_dict()
    for lang in self.supported_languages:
        espoo_lang_id = ESPOO_LANGUAGES[lang]
        url = ESPOO_API_URL.format(lang_code=espoo_lang_id)
        print("Processing lang " + lang)
        print("from URL " + url)
        try:
            self._recur_fetch_paginated_url(url, lang, events)
        except APIBrokenError:
            # Give up on the whole run; partial imports are handled on
            # the next execution.
            return

    event_list = sorted(events.values(), key=lambda x: x['end_time'])
    qs = Event.objects.filter(end_time__gte=datetime.now(),
                              data_source='espoo', deleted=False)

    self.syncher = ModelSyncher(qs, lambda obj: obj.origin_id, delete_func=mark_deleted)

    for event in event_list:
        obj = self.save_event(event)
        self.syncher.mark(obj)

    self.syncher.finish()

    print("%d events processed" % len(events.values()))
deterministic.py | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""The Deterministic distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
# Dependency imports
import six
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.distributions import distribution
from tensorflow_probability.python.distributions import kullback_leibler
from tensorflow_probability.python.internal import assert_util | from tensorflow_probability.python.internal import dtype_util
from tensorflow_probability.python.internal import reparameterization
from tensorflow_probability.python.internal import tensor_util
from tensorflow_probability.python.internal import tensorshape_util
__all__ = [
'Deterministic',
'VectorDeterministic',
]
@six.add_metaclass(abc.ABCMeta)
class _BaseDeterministic(distribution.Distribution):
"""Base class for Deterministic distributions."""
def __init__(self,
loc,
atol=None,
rtol=None,
is_vector=False,
validate_args=False,
allow_nan_stats=True,
parameters=None,
name='_BaseDeterministic'):
"""Initialize a batch of `_BaseDeterministic` distributions.
The `atol` and `rtol` parameters allow for some slack in `pmf`, `cdf`
computations, e.g. due to floating-point error.
```
pmf(x; loc)
= 1, if Abs(x - loc) <= atol + rtol * Abs(loc),
= 0, otherwise.
```
Args:
loc: Numeric `Tensor`. The point (or batch of points) on which this
distribution is supported.
atol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
shape. The absolute tolerance for comparing closeness to `loc`.
Default is `0`.
rtol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
shape. The relative tolerance for comparing closeness to `loc`.
Default is `0`.
is_vector: Python `bool`. If `True`, this is for `VectorDeterministic`,
else `Deterministic`.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
(e.g., mean, mode, variance) use the value '`NaN`' to indicate the
result is undefined. When `False`, an exception is raised if one or
more of the statistic's batch members are undefined.
parameters: Dict of locals to facilitate copy construction.
name: Python `str` name prefixed to Ops created by this class.
Raises:
ValueError: If `loc` is a scalar.
"""
with tf.name_scope(name) as name:
dtype = dtype_util.common_dtype([loc, atol, rtol], dtype_hint=tf.float32)
self._loc = tensor_util.convert_nonref_to_tensor(
loc, dtype_hint=dtype, name='loc')
self._atol = tensor_util.convert_nonref_to_tensor(
0 if atol is None else atol, dtype=dtype, name='atol')
self._rtol = tensor_util.convert_nonref_to_tensor(
0 if rtol is None else rtol, dtype=dtype, name='rtol')
self._is_vector = is_vector
super(_BaseDeterministic, self).__init__(
dtype=self._loc.dtype,
reparameterization_type=(
reparameterization.FULLY_REPARAMETERIZED
if dtype_util.is_floating(self._loc.dtype)
else reparameterization.NOT_REPARAMETERIZED),
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
parameters=parameters,
name=name)
def _slack(self, loc):
# Avoid using the large broadcast with self.loc if possible.
if self.parameters['rtol'] is None:
return self.atol
else:
return self.atol + self.rtol * tf.abs(loc)
@property
def loc(self):
"""Point (or batch of points) at which this distribution is supported."""
return self._loc
@property
def atol(self):
"""Absolute tolerance for comparing points to `self.loc`."""
return self._atol
@property
def rtol(self):
"""Relative tolerance for comparing points to `self.loc`."""
return self._rtol
def _entropy(self):
return tf.zeros(self.batch_shape_tensor(), dtype=self.dtype)
def _mean(self):
return tf.identity(self.loc)
def _variance(self):
return tf.zeros_like(self.loc)
def _mode(self):
return self.mean()
def _sample_n(self, n, seed=None):
del seed # unused
loc = tf.convert_to_tensor(self.loc)
return tf.broadcast_to(
loc,
tf.concat([[n], self._batch_shape_tensor(loc=loc),
self._event_shape_tensor(loc=loc)],
axis=0))
def _default_event_space_bijector(self):
return
def _parameter_control_dependencies(self, is_init):
assertions = []
# In init, we can always build shape and dtype checks because
# we assume shape doesn't change for Variable backed args.
if is_init and self._is_vector:
msg = 'Argument `loc` must be at least rank 1.'
if tensorshape_util.rank(self.loc.shape) is not None:
if tensorshape_util.rank(self.loc.shape) < 1:
raise ValueError(msg)
elif self.validate_args:
assertions.append(
assert_util.assert_rank_at_least(self.loc, 1, message=msg))
if not self.validate_args:
assert not assertions # Should never happen
return []
if is_init != tensor_util.is_ref(self.atol):
assertions.append(
assert_util.assert_non_negative(
self.atol, message='Argument "atol" must be non-negative'))
if is_init != tensor_util.is_ref(self.rtol):
assertions.append(
assert_util.assert_non_negative(
self.rtol, message='Argument "rtol" must be non-negative'))
return assertions
class Deterministic(_BaseDeterministic):
"""Scalar `Deterministic` distribution on the real line.
The scalar `Deterministic` distribution is parameterized by a [batch] point
`loc` on the real line. The distribution is supported at this point only,
and corresponds to a random variable that is constant, equal to `loc`.
See [Degenerate rv](https://en.wikipedia.org/wiki/Degenerate_distribution).
#### Mathematical Details
The probability mass function (pmf) and cumulative distribution function (cdf)
are
```none
pmf(x; loc) = 1, if x == loc, else 0
cdf(x; loc) = 1, if x >= loc, else 0
```
#### Examples
```python
# Initialize a single Deterministic supported at zero.
constant = tfp.distributions.Deterministic(0.)
constant.prob(0.)
==> 1.
constant.prob(2.)
==> 0.
# Initialize a [2, 2] batch of scalar constants.
loc = [[0., 1.], [2., 3.]]
x = [[0., 1.1], [1.99, 3.]]
constant = tfp.distributions.Deterministic(loc)
constant.prob(x)
==> [[1., 0.], [0., 1.]]
```
"""
def __init__(self,
loc,
atol=None,
rtol=None,
validate_args=False,
allow_nan_stats=True,
name='Deterministic'):
"""Initialize a scalar `Deterministic` distribution.
The `atol` and `rtol` parameters allow for some slack in `pmf`, `cdf`
computations, e.g. due to floating-point error.
```
pmf(x; loc)
= 1, if Abs(x - loc) <= atol + rtol * Abs(loc),
= 0, otherwise.
```
Args:
loc: Numeric `Tensor` of shape `[B1, ..., Bb]`, with `b >= 0`.
The point (or batch of points) on which this distribution is supported.
atol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
shape. The absolute tolerance for comparing closeness to `loc`.
Default is `0`.
rtol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
shape. The relative tolerance for comparing closeness to `loc`.
Default is `0`.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
(e.g., mean, mode, variance) use the value '`NaN`' to indicate the
result is undefined. When `False`, an exception is raised if one or
more of the statistic's batch members are undefined.
name: Python `str` name prefixed to Ops created by this class.
"""
parameters = dict(locals())
super(Deterministic, self).__init__(
loc,
atol=atol,
rtol=rtol,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
parameters=parameters,
name=name)
@classmethod
def _params_event_ndims(cls):
return dict(loc=0, atol=0, rtol=0)
def _batch_shape_tensor(self, loc=None):
return tf.broadcast_dynamic_shape(
tf.shape(self.loc if loc is None else loc),
tf.broadcast_dynamic_shape(tf.shape(self.atol), tf.shape(self.rtol)))
def _batch_shape(self):
return tf.broadcast_static_shape(
self.loc.shape,
tf.broadcast_static_shape(self.atol.shape, self.rtol.shape))
def _event_shape_tensor(self, loc=None):
del loc
return tf.constant([], dtype=tf.int32)
def _event_shape(self):
return tf.TensorShape([])
def _prob(self, x):
loc = tf.convert_to_tensor(self.loc)
# Enforces dtype of probability to be float, when self.dtype is not.
prob_dtype = self.dtype if dtype_util.is_floating(
self.dtype) else tf.float32
return tf.cast(tf.abs(x - loc) <= self._slack(loc), dtype=prob_dtype)
def _cdf(self, x):
loc = tf.identity(self.loc)
return tf.cast(x >= loc - self._slack(loc), dtype=self.dtype)
class VectorDeterministic(_BaseDeterministic):
"""Vector `Deterministic` distribution on `R^k`.
The `VectorDeterministic` distribution is parameterized by a [batch] point
`loc in R^k`. The distribution is supported at this point only,
and corresponds to a random variable that is constant, equal to `loc`.
See [Degenerate rv](https://en.wikipedia.org/wiki/Degenerate_distribution).
#### Mathematical Details
The probability mass function (pmf) is
```none
pmf(x; loc)
= 1, if All[Abs(x - loc) <= atol + rtol * Abs(loc)],
= 0, otherwise.
```
#### Examples
```python
tfd = tfp.distributions
# Initialize a single VectorDeterministic supported at [0., 2.] in R^2.
constant = tfd.Deterministic([0., 2.])
constant.prob([0., 2.])
==> 1.
constant.prob([0., 3.])
==> 0.
# Initialize a [3] batch of constants on R^2.
loc = [[0., 1.], [2., 3.], [4., 5.]]
constant = tfd.VectorDeterministic(loc)
constant.prob([[0., 1.], [1.9, 3.], [3.99, 5.]])
==> [1., 0., 0.]
```
"""
def __init__(self,
loc,
atol=None,
rtol=None,
validate_args=False,
allow_nan_stats=True,
name='VectorDeterministic'):
"""Initialize a `VectorDeterministic` distribution on `R^k`, for `k >= 0`.
Note that there is only one point in `R^0`, the 'point' `[]`. So if `k = 0`
then `self.prob([]) == 1`.
The `atol` and `rtol` parameters allow for some slack in `pmf`
computations, e.g. due to floating-point error.
```
pmf(x; loc)
= 1, if All[Abs(x - loc) <= atol + rtol * Abs(loc)],
= 0, otherwise
```
Args:
loc: Numeric `Tensor` of shape `[B1, ..., Bb, k]`, with `b >= 0`, `k >= 0`
The point (or batch of points) on which this distribution is supported.
atol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
shape. The absolute tolerance for comparing closeness to `loc`.
Default is `0`.
rtol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
shape. The relative tolerance for comparing closeness to `loc`.
Default is `0`.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
(e.g., mean, mode, variance) use the value '`NaN`' to indicate the
result is undefined. When `False`, an exception is raised if one or
more of the statistic's batch members are undefined.
name: Python `str` name prefixed to Ops created by this class.
"""
parameters = dict(locals())
super(VectorDeterministic, self).__init__(
loc,
atol=atol,
rtol=rtol,
is_vector=True,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
parameters=parameters,
name=name)
@classmethod
def _params_event_ndims(cls):
return dict(loc=1, atol=1, rtol=1)
def _batch_shape_tensor(self, loc=None):
return tf.broadcast_dynamic_shape(
tf.shape(self.loc if loc is None else loc),
tf.broadcast_dynamic_shape(tf.shape(self.atol),
tf.shape(self.rtol)))[:-1]
def _batch_shape(self):
return tf.broadcast_static_shape(
self.loc.shape,
tf.broadcast_static_shape(self.atol.shape, self.rtol.shape))[:-1]
def _event_shape_tensor(self, loc=None):
return tf.shape(self.loc if loc is None else loc)[-1:]
def _event_shape(self):
return self.loc.shape[-1:]
def _prob(self, x):
loc = tf.convert_to_tensor(self.loc)
return tf.cast(
tf.reduce_all(tf.abs(x - loc) <= self._slack(loc), axis=-1),
dtype=self.dtype)
def _sample_control_dependencies(self, x):
assertions = []
if not self.validate_args:
return assertions
assertions.append(assert_util.assert_rank_at_least(x, 1))
assertions.append(assert_util.assert_equal(
self.event_shape_tensor(), tf.gather(tf.shape(x), tf.rank(x) - 1),
message=('Argument `x` not defined in the same space '
'R**k as this distribution')))
return assertions
@kullback_leibler.RegisterKL(_BaseDeterministic, distribution.Distribution)
def _kl_deterministic_distribution(a, b, name=None):
"""Calculate the batched KL divergence `KL(a || b)` with `a` Deterministic.
Args:
a: instance of a Deterministic distribution object.
b: instance of a Distribution distribution object.
name: (optional) Name to use for created operations. Default is
'kl_deterministic_distribution'.
Returns:
Batchwise `KL(a || b)`.
"""
with tf.name_scope(name or 'kl_deterministic_distribution'):
return -b.log_prob(a.loc) | |
mbc0.rs | use super::MemoryBankController;
pub struct Mbc0 {
rom: Vec<u8>,
ram: Vec<u8>,
}
impl Mbc0 {
pub fn new(data: Vec<u8>) -> Self {
Mbc0 {
rom: data,
ram: vec![0x00; 0x2000],
}
}
}
impl MemoryBankController for Mbc0 {
fn read(&self, addr: u16) -> u8 {
let addr = addr as usize;
match addr {
0x0000..=0x7FFF => self.rom[addr],
0xA000..=0xBFFF => self.ram[addr - 0xA000],
_ => panic!("inaccessible address"),
}
}
fn write(&mut self, addr: u16, data: u8) |
}
| {
let addr = addr as usize;
match addr {
0xA000..=0xBFFF => self.ram[addr - 0xA000] = data,
_ => { /* TODO: Consider if this case should be error */ }
};
} |
payment-button.js | "use strict";
window.addEventListener('load', function() {
console.log(disabledButton)
changeInCheckoutForm();
});
var disabledButton = document.getElementById('js-disabled-button')
var errMsg
var fillFlag;
var emailFlag;
var paymentFlag;
var termsFlag;
function changeInCheckoutForm() {
resetMessageFlags()
console.log('change detected')
let checkoutForm = document.getElementById('js-checkout-form');
let buttonDiv = document.getElementById('js-checkout-button-div');
let paymentMethod = getRadiosValue()
console.log(paymentMethod)
let emailok = false;
// checkCartPrice();
emailok = validateemail();
console.log(emailok);
buttonDiv.innerHTML = "";
buttonDiv.appendChild(disabledButton);
let customer_email = document.getElementById('id_email').value
if (isRequiredFilled(checkoutForm) && emailok) {
loadPaymentButton(checkoutForm, buttonDiv, paymentMethod, customer_email);
}
errorMessage();
};
function loadPaymentButton(checkoutForm, buttonDiv, paymentMethod, email) {
let request;
let params = "payment=" + paymentMethod + "&customer_email=" + email
request = new XMLHttpRequest();
request.open("POST", checkoutForm.dataset.url, true)
request.responseType = "document";
request.send(JSON.stringify(
{
"payment": paymentMethod,
"email": email, | ));
request.onreadystatechange = function(self) {
if(request.readyState == 4) {
let childrenToAdd = this.response.body.children
console.log(childrenToAdd)
buttonDiv.innerHTML = "";
for (let i = 0; i < childrenToAdd.length; i++) {
console.log(childrenToAdd[i])
buttonDiv.appendChild(childrenToAdd[i])
}
}
}
};
function validateemail() {
var x=document.getElementById('id_email').value;
var atposition=x.indexOf("@");
var dotposition=x.lastIndexOf(".");
if (atposition<1 || dotposition<atposition+2 || dotposition+2>=x.length){
emailFlag = true;
return false;
} else {
return true;
}
} ;
function isRequiredFilled(form) {
let requiredFields = form.querySelectorAll("[required]");
let allAreFilled = true;
let radioCheck = false;
requiredFields.forEach(
function(i) {
if (i.type === "radio") {
console.log('radiocheck')
if (i.checked) {
radioCheck = true;
paymentFlag = false;
}
}
if (i.type === "checkbox") {
if (!i.checked) {
allAreFilled = false;
termsFlag = true;
}
}
if (!i.value) {
console.log('not all filled')
allAreFilled = false;
fillFlag = true;
}
}
)
if (allAreFilled && radioCheck) {
return true;
} else {
return false;
}
};
function getRadiosValue() {
let radios = document.getElementsByName('payment')
for (let i = 0; i < radios.length; i++) {
if (radios[i].checked) {
return radios[i].value
}
}
};
function errorMessage() {
console.log('errorMessages')
errMsg = disabledButton.dataset.msgfill;
disabledButton.innerText = errMsg;
if (fillFlag) {
errMsg = disabledButton.dataset.msgfill;
} else if (emailFlag) {
errMsg = disabledButton.dataset.msgemail;
} else if (paymentFlag) {
errMsg = disabledButton.dataset.msgpayment;
} else if (termsFlag) {
errMsg = disabledButton.dataset.msgterms;
} else {
errMsg = disabledButton.dataset.msgwait;
}
disabledButton.innerText = errMsg;
};
function resetMessageFlags() {
fillFlag = false;
emailFlag = false;
paymentFlag = true;
termsFlag = false;
};
// function checkCartPrice() {
// let priceElement = document.getElementById('js-price');
// let checkoutForm = document.getElementById('js-checkout-form');
// price = priceElement.dataset.price;
// if (price >= 0){
// }
// return price;
// } | } |
make_docs.py | # ----------------------------------------------------------------------------
# - Open3D: www.open3d.org -
# ----------------------------------------------------------------------------
# The MIT License (MIT)
#
# Copyright (c) 2018 www.open3d.org
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
# ----------------------------------------------------------------------------
# Sphinx makefile with api docs generation
# (1) The user call `make *` (e.g. `make html`) gets forwarded to make.py
# (2) make.py generate Python api docs, one ".rst" file per class / function
# (3) make.py calls the actual `sphinx-build`
from __future__ import print_function
import argparse
import subprocess
import sys
import multiprocessing
import importlib
import os
from inspect import getmembers, isbuiltin, isclass, ismodule
import shutil
import warnings
import weakref
from tempfile import mkdtemp
def _create_or_clear_dir(dir_path):
if os.path.exists(dir_path):
shutil.rmtree(dir_path)
print("Removed directory %s" % dir_path)
os.makedirs(dir_path)
print("Created directory %s" % dir_path)
class PyAPIDocsBuilder:
"""
Generate Python API *.rst files, per (sub) module, per class, per function.
The file name is the full module name.
E.g. If output_dir == "python_api", the following files are generated:
python_api/open3d.camera.rst
python_api/open3d.camera.PinholeCameraIntrinsic.rst
...
"""
def __init__(self, output_dir, c_module, c_module_relative):
self.output_dir = output_dir
self.c_module = c_module
self.c_module_relative = c_module_relative
print("Generating *.rst Python API docs in directory: %s" %
self.output_dir)
def generate_rst(self):
_create_or_clear_dir(self.output_dir)
main_c_module = importlib.import_module(self.c_module)
sub_module_names = sorted(
[obj[0] for obj in getmembers(main_c_module) if ismodule(obj[1])])
for sub_module_name in sub_module_names:
PyAPIDocsBuilder._generate_sub_module_class_function_docs(
sub_module_name, self.output_dir)
@staticmethod
def _generate_function_doc(sub_module_full_name, function_name,
output_path):
# print("Generating docs: %s" % (output_path,))
out_string = ""
out_string += "%s.%s" % (sub_module_full_name, function_name)
out_string += "\n" + "-" * len(out_string)
out_string += "\n\n" + ".. currentmodule:: %s" % sub_module_full_name
out_string += "\n\n" + ".. autofunction:: %s" % function_name
out_string += "\n"
with open(output_path, "w") as f:
f.write(out_string)
@staticmethod
def _generate_class_doc(sub_module_full_name, class_name, output_path):
# print("Generating docs: %s" % (output_path,))
out_string = ""
out_string += "%s.%s" % (sub_module_full_name, class_name)
out_string += "\n" + "-" * len(out_string)
out_string += "\n\n" + ".. currentmodule:: %s" % sub_module_full_name
out_string += "\n\n" + ".. autoclass:: %s" % class_name
out_string += "\n :members:"
out_string += "\n :undoc-members:"
out_string += "\n :inherited-members:"
out_string += "\n"
with open(output_path, "w") as f:
f.write(out_string)
@staticmethod
def _generate_sub_module_doc(sub_module_name, class_names, function_names,
sub_module_doc_path):
# print("Generating docs: %s" % (sub_module_doc_path,))
class_names = sorted(class_names)
function_names = sorted(function_names)
sub_module_full_name = "open3d.%s" % (sub_module_name,)
out_string = ""
out_string += sub_module_full_name
out_string += "\n" + "-" * len(out_string)
out_string += "\n\n" + ".. currentmodule:: %s" % sub_module_full_name
if len(class_names) > 0:
out_string += "\n\n**Classes**"
out_string += "\n\n.. autosummary::"
out_string += "\n"
for class_name in class_names:
out_string += "\n " + "%s" % (class_name,)
out_string += "\n"
if len(function_names) > 0:
out_string += "\n\n**Functions**"
out_string += "\n\n.. autosummary::"
out_string += "\n"
for function_name in function_names:
out_string += "\n " + "%s" % (function_name,)
out_string += "\n"
obj_names = class_names + function_names
if len(obj_names) > 0:
out_string += "\n\n.. toctree::"
out_string += "\n :hidden:"
out_string += "\n"
for obj_name in obj_names:
out_string += "\n %s <%s.%s>" % (
obj_name,
sub_module_full_name,
obj_name,
)
out_string += "\n"
with open(sub_module_doc_path, "w") as f:
f.write(out_string)
@staticmethod
def _generate_sub_module_class_function_docs(sub_module_name, output_dir):
sub_module = importlib.import_module("open3d.open3d.%s" %
(sub_module_name,))
sub_module_full_name = "open3d.%s" % (sub_module_name,)
print("Generating docs for submodule: %s" % sub_module_full_name)
# Class docs
class_names = [
obj[0] for obj in getmembers(sub_module) if isclass(obj[1])
]
for class_name in class_names:
file_name = "%s.%s.rst" % (sub_module_full_name, class_name)
output_path = os.path.join(output_dir, file_name)
PyAPIDocsBuilder._generate_class_doc(sub_module_full_name,
class_name, output_path)
# Function docs
function_names = [
obj[0] for obj in getmembers(sub_module) if isbuiltin(obj[1])
]
for function_name in function_names:
file_name = "%s.%s.rst" % (sub_module_full_name, function_name)
output_path = os.path.join(output_dir, file_name)
PyAPIDocsBuilder._generate_function_doc(sub_module_full_name,
function_name, output_path)
# Submodule docs
sub_module_doc_path = os.path.join(output_dir,
sub_module_full_name + ".rst")
PyAPIDocsBuilder._generate_sub_module_doc(sub_module_name, class_names,
function_names,
sub_module_doc_path)
class SphinxDocsBuilder:
"""
SphinxDocsBuilder calls Python api docs generation and then calls
sphinx-build:
(1) The user call `make *` (e.g. `make html`) gets forwarded to make.py
(2) Calls PyAPIDocsBuilder to generate Python api docs rst files
(3) Calls `sphinx-build` with the user argument
"""
def __init__(self, html_output_dir, is_release):
# Directory structure for the Open3D Python package:
# open3d
# - __init__.py
# - open3d.so # Actual name depends on OS and Python version
self.c_module = "open3d.open3d" # Points to the open3d.so
self.c_module_relative = "open3d" # The relative module reference to open3d.so
self.python_api_output_dir = "python_api"
self.html_output_dir = html_output_dir
self.is_release = is_release
def run(self):
self._gen_python_api_docs()
self._run_sphinx()
def _gen_python_api_docs(self):
|
def _run_sphinx(self):
"""
Call Sphinx command with hard-coded "html" target
"""
build_dir = os.path.join(self.html_output_dir, "html")
if self.is_release:
version_list = [
line.rstrip('\n').split(' ')[1]
for line in open('../src/Open3D/version.txt')
]
release_version = '.'.join(version_list[:3])
print("Building docs for release:", release_version)
cmd = [
"sphinx-build",
"-b",
"html",
"-D",
"version=" + release_version,
"-D",
"release=" + release_version,
"-j",
str(multiprocessing.cpu_count()),
".",
build_dir,
]
else:
cmd = [
"sphinx-build",
"-b",
"html",
"-j",
str(multiprocessing.cpu_count()),
".",
build_dir,
]
print('Calling: "%s"' % " ".join(cmd))
subprocess.check_call(cmd, stdout=sys.stdout, stderr=sys.stderr)
class DoxygenDocsBuilder:
def __init__(self, html_output_dir):
self.html_output_dir = html_output_dir
def run(self):
doxygen_temp_dir = "doxygen"
_create_or_clear_dir(doxygen_temp_dir)
cmd = ["doxygen", "Doxyfile"]
print('Calling: "%s"' % " ".join(cmd))
subprocess.check_call(cmd, stdout=sys.stdout, stderr=sys.stderr)
shutil.copytree(os.path.join("doxygen", "html"),
os.path.join(self.html_output_dir, "html", "cpp_api"))
if os.path.exists(doxygen_temp_dir):
shutil.rmtree(doxygen_temp_dir)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--sphinx",
dest="build_sphinx",
action="store_true",
default=False,
help="Build Sphinx for main docs and Python API docs.")
parser.add_argument("--doxygen",
dest="build_doxygen",
action="store_true",
default=False,
help="Build Doxygen for C++ API docs.")
parser.add_argument("--is_release",
dest="is_release",
action="store_true",
default=False,
help="Show Open3D version number rather than git hash.")
args = parser.parse_args()
# Clear output dir if new docs are to be built
html_output_dir = "_out"
_create_or_clear_dir(html_output_dir)
# Sphinx is hard-coded to build with the "html" option
# To customize build, run sphinx-build manually
if args.build_sphinx:
print("Sphinx build enabled")
sdb = SphinxDocsBuilder(html_output_dir, args.is_release)
sdb.run()
else:
print("Sphinx build disabled, use --sphinx to enable")
# Doxygen is hard-coded to build with default option
# To customize build, customize Doxyfile or run doxygen manually
if args.build_doxygen:
print("Doxygen build enabled")
ddb = DoxygenDocsBuilder(html_output_dir)
ddb.run()
else:
print("Doxygen build disabled, use --doxygen to enable")
| """
Generate Python docs.
Each module, class and function gets one .rst file.
"""
# self.python_api_output_dir cannot be a temp dir, since other
# "*.rst" files reference it
pd = PyAPIDocsBuilder(self.python_api_output_dir, self.c_module,
self.c_module_relative)
pd.generate_rst() |
__init__.py | from .gfpgan import * |
||
gpa_scatter.py | import helper
import numpy as np
import matplotlib.pyplot as plt |
def plot_gpa_scatter():
"""Plotting scatterplot of grades expected and grade received, using the general department list
"""
# obtaining data
department_df = helper.generate_depts_df(helper.general_dept_list)
comp_criteria = ["AvgGradeExpected","AvgGradeReceived"]
# generating scatterplot graph
lower_bound = 1.5
upper_bound = 4.02
ax = department_df.plot.scatter(x=comp_criteria[0], y=comp_criteria[1], c= "grey",ylim=(lower_bound,upper_bound),xlim=(lower_bound,upper_bound), figsize=(10,10), fontsize=20, alpha = 0.3)
ax.set_xlabel("Average Grade Expected", fontsize = 20)
ax.set_ylabel("Average Grade Received", fontsize = 20)
# computing least squares best fit line and adding it onto graph
y = department_df["AvgGradeReceived"]
x = department_df["AvgGradeExpected"]
A = np.vstack([x, np.ones(len(x))]).T
m, c = np.linalg.lstsq(A, y, rcond=None)[0]
print("m:{}, c:{}".format(m,c))
ax.plot(np.linspace(lower_bound,4,10),np.linspace(lower_bound,4,10),c="red")
ax.plot(np.linspace(lower_bound,4,10),(np.linspace(lower_bound,4,10)*m) + c,c="blue") | import pandas as pd |
lib.rs | use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, Data, DeriveInput, Error, Ident, Type};
#[proc_macro_derive(BinaryDeserialize)]
pub fn derive_binary_deserialize(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
match create_binary_deserialize_impl(input) {
Ok(ts) => ts,
Err(err) => err.to_compile_error().into(),
}
}
fn | (input: DeriveInput) -> Result<TokenStream, Error> {
let fields = match input.data {
Data::Struct(ds) => ds.fields,
_ => {
return Err(Error::new_spanned(
input,
"only structs may be derived from BinaryDeserialize",
))
}
};
let field_types: Vec<&Type> = fields.iter().map(|f| &f.ty).collect();
let field_names: Vec<&Ident> = fields.iter().map(|f| f.ident.as_ref().unwrap()).collect();
let struct_name = input.ident;
let tokens = quote! {
impl ::binde::BinaryDeserialize for #struct_name {
const SIZE: usize = 0 #( + <#field_types as ::binde::BinaryDeserialize>::SIZE )*;
fn deserialize<E, R>(mut reader: R) -> ::std::io::Result<Self>
where
E: ::binde::ByteOrder,
R: ::std::io::Read,
{
Ok(Self {
#(
#field_names: ::binde::deserialize::<E, _, _>(&mut reader)?
),*
})
}
}
};
Ok(tokens.into())
}
| create_binary_deserialize_impl |
shortest3.rs | use ::common::*;
use ::lib::unionfind::UnionFind;
use ::State;
#[derive(Serialize, Deserialize, Debug, Default)]
struct AI {
}
pub fn setup(state: &mut State) {
eprintln!("shortest2");
}
fn get_graph(graph: &Vec<Vec<(usize, usize)>>, user: &Vec<usize>, opt: &Vec<usize>, i: usize) -> (Vec<Vec<(usize, usize)>>, Vec<usize>) {
let n = graph.len();
let mut id = vec![!0; n];
let mut stack = vec![];
let mut n2 = 0;
for s in 0..n {
if id[s] != !0 { continue }
id[s] = n2;
stack.push(s);
while let Some(u) = stack.pop() {
for &(v, e) in &graph[u] {
if (user[e] == i || opt[e] == i) && id[v] == !0 {
id[v] = n2;
stack.push(v);
}
}
}
n2 += 1;
}
let mut g = vec![vec![]; n2];
for u in 0..n {
for &(v, e) in &graph[u] {
if user[e] == !0 && id[u] != id[v] {
g[id[u]].push((id[v], e));
}
}
}
(g, id)
}
pub fn play(state: &mut State) -> usize {
let g = state.graph.iter().map(|u| u.iter().map(|&(v, _)| v).collect()).collect();
let dist: Vec<_> = state.mines.iter().map(|&v| ::lib::bfs(&g, v)).collect();
let n = state.graph.len();
let m = state.es.len();
let mut user = vec![!0; state.es.len()];
let mut opt = vec![!0; state.es.len()];
let mut opt_n = vec![0; state.p];
for &(q, e) in &state.moves {
if user[e] == !0 {
user[e] = q;
} else if opt[e] == !0 {
opt[e] = q;
opt_n[q] += 1;
}
}
let mut score = vec![0.0; m];
for e in 0..m {
if user[e] != !0 { score[e] = -1.0 }
}
let ex = 0.4 + 0.5 * (state.es.len() - state.turn) as f64 / state.es.len() as f64;
for q in 0..state.p {
let (g, id) = get_graph(&state.graph, &user, &opt, q);
let n = g.len();
for (i, &s_) in state.mines.iter().enumerate() {
let s = id[s_];
let (mut qs, mut qt) = (0, 0);
let mut que = vec![!0; n];
let mut ds = vec![!0; n];
ds[s] = 0;
que[qt] = s;
qt += 1;
while qs < qt {
let u = que[qs];
qs += 1;
for &(v, _) in &g[u] {
if ds[v] == !0 {
ds[v] = ds[u] + 1;
que[qt] = v;
qt += 1;
}
}
} | sum[id[v]] += (dist[i][v] * dist[i][v]) as f64 * ex.powf(ds[id[v]] as f64);
}
for &u in que[..qt].iter().rev() {
let mut count: usize = 0;
for &(v, _) in &g[u] {
if ds[v] + 1 == ds[u] {
count += 1;
}
}
for &(v, e) in &g[u] {
if ds[v] + 1 == ds[u] {
let w = sum[u] as f64 / count as f64;
if q == state.my {
score[e] += w;
} else {
score[e] += w / (state.p - 1) as f64;
}
sum[v] += w;
}
}
}
}
}
let mut connected = vec![!0; n];
let mut stack = vec![];
for &s in &state.mines {
if connected[s] != !0 { continue }
connected[s] = s;
stack.push(s);
while let Some(u) = stack.pop() {
for &(v, e) in &state.graph[u] {
if (user[e] == state.my || opt[e] == state.my) && connected[v] == !0 {
connected[v] = s;
stack.push(v);
}
}
}
}
let w = if state.p <= 4 { 1.0 } else if state.p <= 8 { 1.2 } else { 2.0 };
for &u in &state.mines {
for &(_, e) in &state.graph[u] {
if connected[state.es[e].0] != connected[state.es[e].1] {
score[e] *= w;
}
}
}
for u in 0..n {
for &(_, e) in &state.graph[u] {
if connected[state.es[e].0] != connected[state.es[e].1] {
score[e] *= w;
}
}
}
if state.settings.options.unwrap_or(false) {
let mut uf = UnionFind::new(n);
for u in 0..n {
for &(v, e) in &state.graph[u] {
if user[e] == state.my {
uf.unite(u, v);
}
}
}
let mut score_opt = vec![0; state.es.len()];
for e in 0..state.es.len() {
if user[e] == !0 || opt[e] != !0 { score_opt[e] = -1; }
}
for i in 0..state.mines.len() {
let mut total = vec![0; n];
for v in 0..n {
total[uf.find(v)] += (dist[i][v] * dist[i][v]) as i64;
}
for e in 0..state.es.len() {
let (u, v) = state.es[e];
if user[e] == !0 || opt[e] != !0 || uf.same(u, v) { continue }
if uf.same(state.mines[i], u) {
score_opt[e] += total[v];
}
if uf.same(state.mines[i], v) {
score_opt[e] += total[u];
}
}
}
let mut e = 0;
for i in 0..m {
if score[e] < score[i] {
e = i;
}
}
let mut e2 = 0;
for i in 0..m {
if score_opt[e2] < score_opt[i] {
e2 = i;
}
}
if opt_n[state.my] < state.mines.len() && score[e] < score_opt[e2] as f64 {
e = e2;
}
e
} else {
let mut e = 0;
for i in 0..m {
if score[e] < score[i] {
e = i;
}
}
e
}
} | let mut sum = vec![0.0; n];
for v in 0..id.len() { |
gamestate.ts | import * as _ from "lodash";
import * as Promise from 'bluebird';
import { getEdn } from '../utils';
const edn = getEdn();
import { getSetup, getFirstSetup } from './setup';
import { TimeOfDay, AbilityType, ParityType, AlignmentAttributesMap, Alignment, AbilityActivationType } from '../constants';
import { Action, abilityFactory, validate, actionResolver, actionDescriber } from './ability';
import { Slot } from './slot';
import bot from '../comm/bot';
import { createPrivateChannel } from '../comm/restCommands';
const shortId = require('shortid');
export interface Phase {
time: TimeOfDay;
num?: number;
}
export interface Vote {
voterId: string;
voteeName?: string;
}
const NOT_VOTING: string = 'Not Voting';
const NO_LYNCH_NAME: string = 'no lynch';
const NO_LYNCH_DISP: string = 'No Lynch';
// game transcending state
let currentSetup;
// faction info
let factionChannels = new Map<Alignment, string>();
// global (semi) permanent state
let currentGameId: string;
let currentPhase: Phase;
// player info
let playerIds: Array<string> = [];
let playerSlots = new Map<string, Slot>();
// global temporary state
let currentActions: Action[] = [];
let currentVotes: Map<string, string[]>;
// night end handler;
let nightEndTimeout;
// slash command entry points
export function setSetup(tag: string): any {
return Promise.try(() => {
if (currentPhase && currentPhase.time === TimeOfDay.WaitingForPlayers) {
const newSetup = getSetup(tag.toLowerCase());
if (newSetup) {
currentSetup = newSetup;
return bot.postPublicMessage(`Setup was changed to ${currentSetup[':name']} (${currentSetup[':slots'].length} players)`);
} else {
throw new Error(`${tag.toLowerCase()} is not a valid setup.`);
}
} else {
throw new Error(`Cannot change setup at this time.`);
}
});
}
export function addPlayer(playerId: string) {
return Promise.try(() => {
const idx = playerIds.indexOf(playerId);
if (playerIds.length >= currentSetup[':slots'].length) {
throw new Error("Game is full!");
} else if (idx === -1) {
playerIds.push(playerId);
return bot.getUserById(playerId)
.then(player => bot.postPublicMessage(`${player.name} has joined.`))
.then(() => {
if (currentSetup && (currentSetup[':slots'].length === playerIds.length)) {
return startGame();
}
});
} else {
throw new Error("You are already signed up!");
}
});
}
export function removePlayer(playerId: string) {
return Promise.try(() => {
const idx = playerIds.indexOf(playerId);
if (idx !== -1) {
playerIds.splice(idx, 1);
return bot.getUserById(playerId)
.then(player => bot.postPublicMessage(`${player.name} has left.`));
} else {
throw new Error("You are not currently signed up.");
}
});
}
export function doVoteCount() {
const vc = getVc();
const message: string[] = ['Votecount:'];
const livingPlayers = getLivingPlayerCount();
const halfPlusOne = Math.floor(livingPlayers / 2) + 1;
return getPlayerUserMap()
.then(userMap => {
vc.forEach(([voteeId, votes]) => {
if (voteeId === NOT_VOTING) {
message.push([
`[${votes.length}] ${NOT_VOTING}: `,
`(${votes.map(vote => userMap.get(vote).name).join(', ')})`
].join(''));
} else if (voteeId === NO_LYNCH_NAME) {
message.push([
`[${votes.length}] ${NO_LYNCH_DISP}: `,
`(${votes.map(vote => userMap.get(vote).name).join(', ')})`
].join(''));
} else {
message.push([
`[${votes.length}] ${userMap.get(voteeId).name}: `,
`(${votes.map(vote => userMap.get(vote).name).join(', ')})`
].join(''));
}
});
message.push('');
message.push(`With ${getLivingPlayerCount()} alive, it is ${halfPlusOne} to lynch.`);
return bot.postPublicMessage(message.join('\n'));
});
}
export function addOrReplaceAction(actorId: string, actionName: string, targetName: string) {
requirePlaying(actorId);
return getPlayerUserMap()
.then(userMap => {
let targetId;
const target = Array.from(userMap.values()).find(user => user.name === targetName.toLowerCase());
if (target) {
targetId = target.id;
}
const livingPlayers = getLivingPlayers();
if (!targetId || !livingPlayers.find(livingPlayer => livingPlayer.playerId === targetId)) {
throw new Error(`No player ${targetName} is currently playing and alive.`);
}
return addOrReplaceFormattedAction({
actor: playerSlots.get(actorId),
abilityType: actionResolver(actionName),
target: targetId == null ? null : playerSlots.get(targetId)
});
});
}
export function setVote({ voterId, voteeName }: Vote) {
requirePlaying(voterId);
if (currentPhase.time !== TimeOfDay.Day) {
throw new Error("You cannot vote right now.");
}
return getPlayerUserMap()
.then(userMap => {
let voteeId;
if (!voteeName) {
voteeId = NOT_VOTING;
} else if (voteeName.toLowerCase() === NO_LYNCH_NAME) {
voteeId = NO_LYNCH_NAME;
} else {
const votee = Array.from(userMap.values()).find(user => user.name === voteeName.toLowerCase());
if (votee) {
voteeId = votee.id;
}
const livingPlayers = getLivingPlayers();
if (!voteeId || !livingPlayers.find(livingPlayer => livingPlayer.playerId === voteeId)) {
throw new Error(`No player ${voteeName} is currently playing and alive.`);
}
}
for (const [votee, votes] of currentVotes) {
const idx = votes.indexOf(voterId);
if (idx > -1) {
votes.splice(idx, 1);
}
}
currentVotes.get(voteeId).push(voterId);
if (voteeId === NOT_VOTING) {
return Promise.all([
userMap, bot.postPublicMessage(`${userMap.get(voterId).name} is no longer voting.`)
]);
} else if (voteeId === NO_LYNCH_NAME) {
return Promise.all([
userMap, bot.postPublicMessage(`${userMap.get(voterId).name} is now voting ${NO_LYNCH_DISP}.`)
]);
} else {
return Promise.all([
userMap,
bot.postPublicMessage(`${userMap.get(voterId).name} is now voting ${userMap.get(voteeId).name}.`)
]);
}
})
.then(([userMap, _]) => {
const vc = getVc();
const halfPlus1 = Math.floor(getLivingPlayerCount() / 2) + 1;
const [lyncheeId, votesToLynch] = vc.find(([voteeId, votes]) => votes.length >= halfPlus1);
//a lynch has been reached.
if (lyncheeId && (lyncheeId !== NOT_VOTING)) {
const message: string[] = [];
if (lyncheeId !== NO_LYNCH_NAME) {
const slot = playerSlots.get(lyncheeId);
slot.die();
const victor = isGameOver();
if (victor != null) {
return endGame(victor);
}
message.push(`${userMap.get(lyncheeId).name} was lynched. They were a ${slot.name}.`);
message.push(`It is now Night ${currentPhase.num}. Night will last ${process.env.NIGHT_LENGTH} seconds.`);
} else {
message.push(`No one was lynched.`);
message.push(`It is now Night ${currentPhase.num}. Night will last ${process.env.NIGHT_LENGTH} seconds.`);
}
changePhase({ time: TimeOfDay.Night, num: currentPhase.num });
nightEndTimeout = setTimeout(endNight, parseInt(process.env.NIGHT_LENGTH, 10) * 1000);
return bot.postPublicMessage(message.join('\n'));
}
});
}
// other public getter/setters
export function reset(): void {
if (nightEndTimeout) {
clearTimeout(nightEndTimeout);
}
changePhase({ time: TimeOfDay.WaitingForPlayers });
currentGameId = undefined;
playerIds.length = 0;
playerSlots.clear();
currentActions.length = 0;
initVotes();
}
export function getPlayers(): Map<string, Slot> {
return playerSlots;
}
export function getGameId(): string {
return currentGameId;
}
export function setDefaultSetup(): void {
currentSetup = getFirstSetup();
}
export function getCurrentSetup(): any {
return currentSetup;
}
export function getFactionChannels(): Map<Alignment, string> {
return factionChannels;
} | export function getPhase(): Phase {
return currentPhase;
}
// private module methods
function requirePlaying(playerId: string): void {
if (!playerSlots.has(playerId)) {
throw new Error('You are not currently playing!');
}
}
function getPlayerUserMap(): Promise<Map<string, any>> {
return Promise.all(playerIds.map(playerId => bot.getUserById(playerId)))
.then(users => users.reduce((acc, user) => {
acc.set(user.id, user);
return acc;
}, new Map<string, any>()));
}
function startGame() {
changePhase({ time: TimeOfDay.Pregame });
currentGameId = shortId.generate();
playerSlots.clear();
const shuffledPlayers = _.shuffle(playerIds);
shuffledPlayers.forEach((playerId, i) => {
const rawSlot = currentSetup[':slots'][i];
const name = rawSlot[':name'];
const alignment = rawSlot[':alignment'];
const abilities = rawSlot[':abilities'].map(ability => {
return {
abilityType: ability[':ability-type'],
usage: {
charges: (ability[':usage'] && ability[':usage'][':charges']) || -1,
parity: (ability[':usage'] && ability[':usage'][':parity']) || ParityType.Any,
time: (ability[':usage'] && ability[':usage'][':time']) || TimeOfDay.Night
}
};
});
const slot = new Slot(playerId, name, alignment, abilities);
playerSlots.set(playerId, slot);
});
return Promise.all([createPrivateChannels(), sendRoles()])
.then(() => {
changePhase({ time: TimeOfDay.Day, num: 1 });
return bot.postPublicMessage(`It is now Day 1.`)
.then(() => {
initVotes();
doVoteCount();
});
});
}
function createPrivateChannels() {
const alignmentMap = Array.from(playerSlots.values())
.reduce((p, c) => {
if (!p.has(c.alignment)) {
return p.set(c.alignment, [c.playerId]);
} else {
p.get(c.alignment).push(c.playerId);
return p;
}
}, new Map<Alignment, [string]>());
return Promise.all(Array.from(alignmentMap.entries())
.filter(([alignment, _]) => alignment !== Alignment.Town)
.map(([alignment, members]) => {
return createPrivateChannel(`${AlignmentAttributesMap.get(alignment).name}-${getGameId()}`, members)
.then(channelId => {
return factionChannels.set(alignment, channelId);
});
}));
}
function sendRoles() {
return Promise.all(Array.from(playerSlots.entries())
.map(([playerId, slot]) => {
return bot.postMessageToUserById(playerId, `Your role is: ${slot.name}.`);
}));
}
function changePhase(phase: Phase): void {
currentPhase = phase;
initVotes();
for (const playerId of playerIds) {
if (playerSlots.has(playerId)) {
playerSlots.get(playerId).resetMutableState();
}
}
}
function addOrReplaceFormattedAction(action: Action) {
return Promise.try(() => {
const abilityDef = abilityFactory(action.abilityType);
if (!validate(action, currentPhase)) {
throw new Error('You are unable to perform this action.');
}
//remove any previous actions by that player of that type
let dedupers = [action.actor.playerId];
//factional actions may only be performed by one faction member per night
if (abilityDef.activationType === AbilityActivationType.Factional) {
dedupers = _.filter(Array.from(playerSlots), ([player, slot]) => slot.alignment === action.actor.alignment)
.map(([player, slot]) => player);
}
// remove action overwritten by the new one received if any
_(currentActions)
.remove(currentAction => action.abilityType === currentAction.abilityType && _(dedupers).includes(currentAction.actor.playerId))
.value();
// add new action
currentActions.push(action);
currentActions.sort((a, b) => {
return a.abilityType - b.abilityType;
});
if (factionChannels.has(action.actor.alignment)) {
return getPlayerUserMap()
.then(userMap => {
return bot.postMessage(
factionChannels.get(action.actor.alignment),
getActionsForFaction(action.actor.alignment).map(action => {
let a = `${userMap.get(action.actor.playerId).name} will ${actionDescriber(action.abilityType)}`;
if (action.target) {
a += ` ${userMap.get(action.target.playerId).name}`;
}
return a;
}).join('\n'));
});
}
});
}
function getVc(): any[] {
return Array.from(currentVotes.entries()).reduce((acc, [voteeId, votes]) => {
acc.push([voteeId, votes]);
return acc;
}, [])
.filter(([voteeId, votes]) => votes.length > 0)
.sort((a, b) => {
if (a[0] === NOT_VOTING) {
return 1;
} else if (b[0] === NOT_VOTING) {
return -1;
} else {
return b[1].length - a[1].length;
}
});
}
function getLivingPlayers(): Slot[] {
return Array.from(playerSlots.values()).filter(slot => slot.isAlive);
}
function getLivingPlayerCount(): number {
return getLivingPlayers().length;
}
function initVotes(): void {
const livingPlayerIdsUnordered = getLivingPlayers().map(player => player.playerId);
const livingPlayerIds = playerIds.filter(playerId => livingPlayerIdsUnordered.includes(playerId));
currentVotes = livingPlayerIds
.reduce((acc, playerId) => {
acc.set(playerId, []);
return acc;
}, new Map<string, string[]>());
currentVotes.set(NOT_VOTING, []);
currentVotes.set(NO_LYNCH_NAME, []);
livingPlayerIds.forEach(playerId => {
currentVotes.get(NOT_VOTING).push(playerId);
});
}
function getActionsForFaction(faction: Alignment): Action[] {
return currentActions.filter(action => {
return action.actor.alignment === faction;
});
}
function endNight() {
const passivesToApply = [];
Array.from(playerSlots.values()).forEach(slot => {
slot.abilities.forEach(ability => {
const abilityDef = abilityFactory(ability.abilityType);
if (abilityDef.activationType === AbilityActivationType.Passive) {
passivesToApply.push(addOrReplaceFormattedAction({
actor: slot,
abilityType: ability.abilityType
})
.catch(e => { }));
}
});
});
return Promise.all(passivesToApply)
.then(() => {
return Promise.all(currentActions.map(action => {
const ability = abilityFactory(action.abilityType);
action.actor.consumeAbility(action.abilityType);
return Promise.resolve(ability.resolve(action.actor, action.target));
}));
})
.then(() => {
const victor = isGameOver();
if (victor != null) {
return endGame(victor);
}
changePhase({ time: TimeOfDay.Day, num: currentPhase.num + 1 });
return bot.postPublicMessage(`It is now Day ${currentPhase.num}`)
.then(() => {
initVotes();
doVoteCount();
});
});
}
function isGameOver(): Alignment {
const livingPlayers = getLivingPlayers();
// town
if (livingPlayers.every(player => player.alignment === Alignment.Town)) {
return Alignment.Town;
}
const livingMafia = livingPlayers.filter(player => player.alignment === Alignment.Mafia);
if (livingMafia.length >= livingPlayers.length / 2) {
return Alignment.Mafia;
}
return null;
}
function endGame(victor: Alignment) {
const winners = Array.from(playerSlots.values()).filter(slot => slot.alignment === victor);
return getPlayerUserMap()
.then(userMap => {
let message = [`The game has ended. The ${AlignmentAttributesMap.get(victor).name}, consisting of:`];
message = message.concat(winners.map(winner => userMap.get(winner.playerId).name));
message.push(`has won!`);
reset();
return bot.postPublicMessage(message.join('\n'));
});
} | |
keeper.go | package keeper
import (
"encoding/binary"
"fmt"
"path/filepath"
"sync"
"github.com/tendermint/tendermint/libs/log"
"golang.org/x/sync/semaphore"
"github.com/cosmos/cosmos-sdk/codec"
"github.com/cosmos/cosmos-sdk/store/prefix"
sdk "github.com/cosmos/cosmos-sdk/types"
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
paramstypes "github.com/cosmos/cosmos-sdk/x/params/types"
wasmvm "github.com/CosmWasm/wasmvm"
"github.com/terra-money/core/x/wasm/config"
"github.com/terra-money/core/x/wasm/types"
)
// Keeper will have a reference to Wasmer with it's own data directory.
type Keeper struct {
storeKey sdk.StoreKey
cdc codec.BinaryCodec
paramSpace paramstypes.Subspace
accountKeeper types.AccountKeeper
bankKeeper types.BankKeeper
treasuryKeeper types.TreasuryKeeper
serviceRouter types.MsgServiceRouter
queryRouter types.GRPCQueryRouter
wasmVM types.WasmerEngine
wasmReadVMPool []types.WasmerEngine
wasmReadVMSemaphore *semaphore.Weighted
wasmReadVMMutex *sync.Mutex
querier types.Querier
msgParser types.MsgParser
// WASM config values
wasmConfig *config.Config
}
// NewKeeper creates a new contract Keeper instance
func NewKeeper(
cdc codec.BinaryCodec,
storeKey sdk.StoreKey,
paramspace paramstypes.Subspace,
accountKeeper types.AccountKeeper,
bankKeeper types.BankKeeper,
treasuryKeeper types.TreasuryKeeper,
serviceRouter types.MsgServiceRouter,
queryRouter types.GRPCQueryRouter,
supportedFeatures string,
homePath string,
wasmConfig *config.Config) Keeper {
// set KeyTable if it has not already been set
if !paramspace.HasKeyTable() {
paramspace = paramspace.WithKeyTable(types.ParamKeyTable())
}
// prevent zero write vm cache
if wasmConfig.WriteVMMemoryCacheSize == 0 {
wasmConfig.WriteVMMemoryCacheSize = config.DefaultWriteVMMemoryCacheSize
}
var writeWasmVM types.WasmerEngine
if vm, err := wasmvm.NewVM(
filepath.Join(homePath, config.DBDir),
supportedFeatures,
types.ContractMemoryLimit,
wasmConfig.ContractDebugMode,
wasmConfig.WriteVMMemoryCacheSize,
); err != nil {
panic(err)
} else {
writeWasmVM = types.NewWasmerEngineWithQueryDepth(vm)
}
// prevent zero read vm
if wasmConfig.NumReadVMs == 0 {
wasmConfig.NumReadVMs = config.DefaultNumReadVM
}
// prevent zero read vm cache
if wasmConfig.ReadVMMemoryCacheSize == 0 {
wasmConfig.ReadVMMemoryCacheSize = config.DefaultReadVMMemoryCacheSize
}
numReadVms := wasmConfig.NumReadVMs
wasmReadVMPool := make([]types.WasmerEngine, numReadVms)
for i := uint32(0); i < numReadVms; i++ {
if vm, err := wasmvm.NewVM(
filepath.Join(homePath, config.DBDir),
supportedFeatures,
types.ContractMemoryLimit,
wasmConfig.ContractDebugMode,
wasmConfig.ReadVMMemoryCacheSize,
); err != nil {
panic(err)
} else {
wasmReadVMPool[i] = types.NewWasmerEngineWithQueryDepth(vm)
}
}
return Keeper{
storeKey: storeKey,
cdc: cdc,
paramSpace: paramspace,
wasmVM: writeWasmVM,
wasmReadVMPool: wasmReadVMPool,
wasmReadVMSemaphore: semaphore.NewWeighted(int64(numReadVms)),
wasmReadVMMutex: &sync.Mutex{},
accountKeeper: accountKeeper,
bankKeeper: bankKeeper,
treasuryKeeper: treasuryKeeper,
serviceRouter: serviceRouter,
queryRouter: queryRouter,
wasmConfig: wasmConfig,
msgParser: types.NewWasmMsgParser(),
querier: types.NewWasmQuerier(),
}
}
// Logger returns a module-specific logger.
func (k Keeper) Logger(ctx sdk.Context) log.Logger {
return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName))
}
| // GetLastCodeID return last code ID
func (k Keeper) GetLastCodeID(ctx sdk.Context) (uint64, error) {
store := ctx.KVStore(k.storeKey)
bz := store.Get(types.LastCodeIDKey)
if bz == nil {
return 0, sdkerrors.Wrap(types.ErrInvalidGenesis, "initial code ID hasn't been set")
}
return binary.BigEndian.Uint64(bz), nil
}
// SetLastCodeID set last code id
func (k Keeper) SetLastCodeID(ctx sdk.Context, id uint64) {
store := ctx.KVStore(k.storeKey)
bz := sdk.Uint64ToBigEndian(id)
store.Set(types.LastCodeIDKey, bz)
}
// GetLastInstanceID return last instance ID
func (k Keeper) GetLastInstanceID(ctx sdk.Context) (uint64, error) {
store := ctx.KVStore(k.storeKey)
bz := store.Get(types.LastInstanceIDKey)
if bz == nil {
return 0, sdkerrors.Wrap(types.ErrInvalidGenesis, "initial instance ID hasn't been set")
}
return binary.BigEndian.Uint64(bz), nil
}
// SetLastInstanceID set last instance id
func (k Keeper) SetLastInstanceID(ctx sdk.Context, id uint64) {
store := ctx.KVStore(k.storeKey)
bz := sdk.Uint64ToBigEndian(id)
store.Set(types.LastInstanceIDKey, bz)
}
// GetCodeInfo returns CodeInfo for the given codeID
func (k Keeper) GetCodeInfo(ctx sdk.Context, codeID uint64) (codeInfo types.CodeInfo, err error) {
store := ctx.KVStore(k.storeKey)
bz := store.Get(types.GetCodeInfoKey(codeID))
if bz == nil {
return types.CodeInfo{}, sdkerrors.Wrapf(types.ErrNotFound, "codeID %d", codeID)
}
k.cdc.MustUnmarshal(bz, &codeInfo)
return
}
// SetCodeInfo stores CodeInfo for the given codeID
func (k Keeper) SetCodeInfo(ctx sdk.Context, codeID uint64, codeInfo types.CodeInfo) {
store := ctx.KVStore(k.storeKey)
bz := k.cdc.MustMarshal(&codeInfo)
store.Set(types.GetCodeInfoKey(codeID), bz)
}
// GetContractInfo returns contract info of the given address
func (k Keeper) GetContractInfo(ctx sdk.Context, contractAddress sdk.AccAddress) (contractInfo types.ContractInfo, err error) {
store := ctx.KVStore(k.storeKey)
contractBz := store.Get(types.GetContractInfoKey(contractAddress))
if contractBz == nil {
return types.ContractInfo{}, sdkerrors.Wrapf(types.ErrNotFound, "constractInfo %s", contractAddress.String())
}
k.cdc.MustUnmarshal(contractBz, &contractInfo)
return contractInfo, nil
}
// SetContractInfo stores ContractInfo for the given contractAddress
func (k Keeper) SetContractInfo(ctx sdk.Context, contractAddress sdk.AccAddress, codeInfo types.ContractInfo) {
store := ctx.KVStore(k.storeKey)
b := k.cdc.MustMarshal(&codeInfo)
store.Set(types.GetContractInfoKey(contractAddress), b)
}
// IterateContractInfo iterates all contract infos
func (k Keeper) IterateContractInfo(ctx sdk.Context, cb func(types.ContractInfo) bool) {
prefixStore := prefix.NewStore(ctx.KVStore(k.storeKey), types.ContractInfoKey)
iter := prefixStore.Iterator(nil, nil)
for ; iter.Valid(); iter.Next() {
var contract types.ContractInfo
k.cdc.MustUnmarshal(iter.Value(), &contract)
// cb returns true to stop early
if cb(contract) {
break
}
}
}
// GetContractStoreIterator returns iterator for a contract store
func (k Keeper) GetContractStoreIterator(ctx sdk.Context, contractAddress sdk.AccAddress) sdk.Iterator {
prefixStoreKey := types.GetContractStoreKey(contractAddress)
prefixStore := prefix.NewStore(ctx.KVStore(k.storeKey), prefixStoreKey)
return prefixStore.Iterator(nil, nil)
}
// SetContractStore records all the Models on the contract store
func (k Keeper) SetContractStore(ctx sdk.Context, contractAddress sdk.AccAddress, models []types.Model) {
prefixStoreKey := types.GetContractStoreKey(contractAddress)
prefixStore := prefix.NewStore(ctx.KVStore(k.storeKey), prefixStoreKey)
for _, model := range models {
prefixStore.Set(model.Key, model.Value)
}
}
// GetByteCode returns ByteCode of the given CodeHash
func (k Keeper) GetByteCode(ctx sdk.Context, codeID uint64) ([]byte, error) {
codeInfo, sdkErr := k.GetCodeInfo(ctx, codeID)
if sdkErr != nil {
return nil, sdkErr
}
byteCode, err := k.wasmVM.GetCode(codeInfo.CodeHash)
if err != nil {
return nil, err
}
return byteCode, nil
}
// RegisterMsgParsers register module msg parsers
func (k *Keeper) RegisterMsgParsers(
parsers map[string]types.WasmMsgParserInterface,
stargateWasmMsgParser types.StargateWasmMsgParserInterface,
) {
for route, parser := range parsers {
k.msgParser.Parsers[route] = parser
}
if stargateWasmMsgParser != nil {
k.msgParser.StargateParser = stargateWasmMsgParser
}
}
// RegisterQueriers register module queriers
func (k *Keeper) RegisterQueriers(
queriers map[string]types.WasmQuerierInterface,
stargateWasmQuerier types.StargateWasmQuerierInterface,
) {
for route, querier := range queriers {
k.querier.Queriers[route] = querier
}
if stargateWasmQuerier != nil {
k.querier.StargateQuerier = stargateWasmQuerier
}
} | |
pod_template.rs | // Generated from definition io.k8s.api.core.v1.PodTemplate
/// PodTemplate describes a template for creating copies of a predefined pod.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct PodTemplate {
/// Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata
pub metadata: Option<crate::apimachinery::pkg::apis::meta::v1::ObjectMeta>,
/// Template defines the pods that will be created from this pod template. https://git.k8s.io/community/contributors/devel/api-conventions.md#spec-and-status
pub template: Option<crate::api::core::v1::PodTemplateSpec>,
}
// Begin /v1/PodTemplate
// Generated from operation createCoreV1NamespacedPodTemplate
impl PodTemplate {
/// create a PodTemplate
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::CreateResponse`]`<Self>>` constructor, or [`crate::CreateResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `body`
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn create_namespaced_pod_template(
namespace: &str,
body: &crate::api::core::v1::PodTemplate,
optional: crate::CreateOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<crate::CreateResponse<Self>>), crate::RequestError> {
let __url = format!("/api/v1/namespaces/{namespace}/podtemplates?",
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = http::Request::post(__url);
let __body = serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
let __request = __request.header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation deleteCoreV1CollectionNamespacedPodTemplate
impl PodTemplate {
/// delete collection of PodTemplate
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::DeleteResponse`]`<`[`crate::List`]`<Self>>>` constructor, or [`crate::DeleteResponse`]`<`[`crate::List`]`<Self>>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `delete_optional`
///
/// Delete options. Use `Default::default()` to not pass any.
///
/// * `list_optional`
///
/// List options. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn delete_collection_namespaced_pod_template(
namespace: &str,
delete_optional: crate::DeleteOptional<'_>,
list_optional: crate::ListOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<crate::DeleteResponse<crate::List<Self>>>), crate::RequestError> {
let __url = format!("/api/v1/namespaces/{namespace}/podtemplates?",
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
list_optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = http::Request::delete(__url);
let __body = serde_json::to_vec(&delete_optional).map_err(crate::RequestError::Json)?;
let __request = __request.header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation deleteCoreV1NamespacedPodTemplate
impl PodTemplate {
/// delete a PodTemplate
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::DeleteResponse`]`<Self>>` constructor, or [`crate::DeleteResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the PodTemplate
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn delete_namespaced_pod_template(
name: &str,
namespace: &str,
optional: crate::DeleteOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<crate::DeleteResponse<Self>>), crate::RequestError> {
let __url = format!("/api/v1/namespaces/{namespace}/podtemplates/{name}",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let __request = http::Request::delete(__url);
let __body = serde_json::to_vec(&optional).map_err(crate::RequestError::Json)?;
let __request = __request.header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation listCoreV1NamespacedPodTemplate
impl PodTemplate {
/// list or watch objects of kind PodTemplate
///
/// This operation only supports listing all items of this type.
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::ListResponse`]`<Self>`>` constructor, or [`crate::ListResponse`]`<Self>`` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn list_namespaced_pod_template(
namespace: &str,
optional: crate::ListOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<crate::ListResponse<Self>>), crate::RequestError> {
let __url = format!("/api/v1/namespaces/{namespace}/podtemplates?",
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation listCoreV1PodTemplateForAllNamespaces
impl PodTemplate {
/// list or watch objects of kind PodTemplate
///
/// This operation only supports listing all items of this type.
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::ListResponse`]`<Self>`>` constructor, or [`crate::ListResponse`]`<Self>`` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn list_pod_template_for_all_namespaces(
optional: crate::ListOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<crate::ListResponse<Self>>), crate::RequestError> |
}
// Generated from operation patchCoreV1NamespacedPodTemplate
impl PodTemplate {
    /// Partially update the specified PodTemplate.
    ///
    /// Use the returned [`crate::ResponseBody`]`<`[`crate::PatchResponse`]`<Self>>` constructor, or [`crate::PatchResponse`]`<Self>` directly, to parse the HTTP response.
    ///
    /// # Arguments
    ///
    /// * `name` - name of the PodTemplate
    /// * `namespace` - object name and auth scope, such as for teams and projects
    /// * `body` - the patch to apply
    /// * `optional` - optional parameters; use `Default::default()` to not pass any
    #[cfg(feature = "api")]
    pub fn patch_namespaced_pod_template(
        name: &str,
        namespace: &str,
        body: &crate::apimachinery::pkg::apis::meta::v1::Patch,
        optional: crate::PatchOptional<'_>,
    ) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<crate::PatchResponse<Self>>), crate::RequestError> {
        // Percent-encode both path segments, then let `optional` append its query pairs.
        let path = format!("/api/v1/namespaces/{namespace}/podtemplates/{name}?",
            name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
            namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
        );
        let mut query_pairs = crate::url::form_urlencoded::Serializer::new(path);
        optional.__serialize(&mut query_pairs);
        let url = query_pairs.finish();
        // The Content-Type header must advertise the concrete patch flavor.
        let content_type = match body {
            crate::apimachinery::pkg::apis::meta::v1::Patch::Json(_) => "application/json-patch+json",
            crate::apimachinery::pkg::apis::meta::v1::Patch::Merge(_) => "application/merge-patch+json",
            crate::apimachinery::pkg::apis::meta::v1::Patch::StrategicMerge(_) => "application/strategic-merge-patch+json",
        };
        let payload = serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
        let builder = http::Request::patch(url)
            .header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static(content_type));
        match builder.body(payload) {
            Ok(request) => Ok((request, crate::ResponseBody::new)),
            Err(err) => Err(crate::RequestError::Http(err)),
        }
    }
}
// Generated from operation readCoreV1NamespacedPodTemplate
impl PodTemplate {
    /// Read the specified PodTemplate.
    ///
    /// Use the returned [`crate::ResponseBody`]`<`[`ReadNamespacedPodTemplateResponse`]`>` constructor, or [`ReadNamespacedPodTemplateResponse`] directly, to parse the HTTP response.
    ///
    /// # Arguments
    ///
    /// * `name` - name of the PodTemplate
    /// * `namespace` - object name and auth scope, such as for teams and projects
    /// * `optional` - optional parameters; use `Default::default()` to not pass any
    #[cfg(feature = "api")]
    pub fn read_namespaced_pod_template(
        name: &str,
        namespace: &str,
        optional: ReadNamespacedPodTemplateOptional<'_>,
    ) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<ReadNamespacedPodTemplateResponse>), crate::RequestError> {
        let ReadNamespacedPodTemplateOptional { exact, export, pretty } = optional;
        let path = format!("/api/v1/namespaces/{namespace}/podtemplates/{name}?",
            name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
            namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
        );
        let mut query_pairs = crate::url::form_urlencoded::Serializer::new(path);
        // Only parameters the caller actually set are serialized.
        if let Some(exact) = exact {
            query_pairs.append_pair("exact", &exact.to_string());
        }
        if let Some(export) = export {
            query_pairs.append_pair("export", &export.to_string());
        }
        if let Some(pretty) = pretty {
            query_pairs.append_pair("pretty", pretty);
        }
        let url = query_pairs.finish();
        match http::Request::get(url).body(Vec::new()) {
            Ok(request) => Ok((request, crate::ResponseBody::new)),
            Err(err) => Err(crate::RequestError::Http(err)),
        }
    }
}
/// Optional parameters of [`PodTemplate::read_namespaced_pod_template`].
/// Fields left as `None` are omitted from the request's query string.
#[cfg(feature = "api")]
#[derive(Clone, Copy, Debug, Default)]
pub struct ReadNamespacedPodTemplateOptional<'a> {
    /// Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
    pub exact: Option<bool>,
    /// Should this value be exported. Export strips fields that a user can not specify.
    pub export: Option<bool>,
    /// If 'true', then the output is pretty printed.
    pub pretty: Option<&'a str>,
}
/// Use `<ReadNamespacedPodTemplateResponse as Response>::try_from_parts` to parse the HTTP response body of [`PodTemplate::read_namespaced_pod_template`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum ReadNamespacedPodTemplateResponse {
    /// HTTP 200: the requested PodTemplate.
    Ok(crate::api::core::v1::PodTemplate),
    /// Any other status: the raw JSON body if one could be parsed, `None` for an empty body.
    Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for ReadNamespacedPodTemplateResponse {
    // Parses a possibly-partial response buffer. On success returns the parsed
    // value plus the number of bytes consumed; `NeedMoreData` tells the caller
    // to read more bytes and retry with a larger buffer.
    fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
        match status_code {
            http::StatusCode::OK => {
                let result = match serde_json::from_slice(buf) {
                    Ok(value) => value,
                    // An EOF error from serde means the buffer holds a truncated JSON document.
                    Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
                    Err(err) => return Err(crate::ResponseError::Json(err)),
                };
                Ok((ReadNamespacedPodTemplateResponse::Ok(result), buf.len()))
            },
            _ => {
                // Non-200: try to capture the body as arbitrary JSON, but tolerate
                // an empty or malformed body (consuming 0 bytes in those cases).
                let (result, read) =
                    if buf.is_empty() {
                        (Ok(None), 0)
                    }
                    else {
                        match serde_json::from_slice(buf) {
                            Ok(value) => (Ok(Some(value)), buf.len()),
                            Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
                            Err(err) => (Err(err), 0),
                        }
                    };
                Ok((ReadNamespacedPodTemplateResponse::Other(result), read))
            },
        }
    }
}
// Generated from operation replaceCoreV1NamespacedPodTemplate
impl PodTemplate {
    /// Replace the specified PodTemplate.
    ///
    /// Use the returned [`crate::ResponseBody`]`<`[`crate::ReplaceResponse`]`<Self>>` constructor, or [`crate::ReplaceResponse`]`<Self>` directly, to parse the HTTP response.
    ///
    /// # Arguments
    ///
    /// * `name` - name of the PodTemplate
    /// * `namespace` - object name and auth scope, such as for teams and projects
    /// * `body` - the replacement object
    /// * `optional` - optional parameters; use `Default::default()` to not pass any
    #[cfg(feature = "api")]
    pub fn replace_namespaced_pod_template(
        name: &str,
        namespace: &str,
        body: &crate::api::core::v1::PodTemplate,
        optional: crate::ReplaceOptional<'_>,
    ) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<crate::ReplaceResponse<Self>>), crate::RequestError> {
        let path = format!("/api/v1/namespaces/{namespace}/podtemplates/{name}?",
            name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
            namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
        );
        let mut query_pairs = crate::url::form_urlencoded::Serializer::new(path);
        optional.__serialize(&mut query_pairs);
        let url = query_pairs.finish();
        // Replacement always ships a plain JSON body.
        let payload = serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
        let builder = http::Request::put(url)
            .header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static("application/json"));
        match builder.body(payload) {
            Ok(request) => Ok((request, crate::ResponseBody::new)),
            Err(err) => Err(crate::RequestError::Http(err)),
        }
    }
}
// Generated from operation watchCoreV1NamespacedPodTemplate
impl PodTemplate {
    /// List or watch objects of kind PodTemplate.
    ///
    /// This operation only supports watching one item, or a list of items, of this type for changes.
    ///
    /// Use the returned [`crate::ResponseBody`]`<`[`crate::WatchResponse`]`<Self>>` constructor, or [`crate::WatchResponse`]`<Self>` directly, to parse the HTTP response.
    ///
    /// # Arguments
    ///
    /// * `namespace` - object name and auth scope, such as for teams and projects
    /// * `optional` - optional parameters; use `Default::default()` to not pass any
    #[cfg(feature = "api")]
    pub fn watch_namespaced_pod_template(
        namespace: &str,
        optional: crate::WatchOptional<'_>,
    ) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<crate::WatchResponse<Self>>), crate::RequestError> {
        let path = format!("/api/v1/namespaces/{namespace}/podtemplates?",
            namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
        );
        let mut query_pairs = crate::url::form_urlencoded::Serializer::new(path);
        optional.__serialize(&mut query_pairs);
        let url = query_pairs.finish();
        match http::Request::get(url).body(Vec::new()) {
            Ok(request) => Ok((request, crate::ResponseBody::new)),
            Err(err) => Err(crate::RequestError::Http(err)),
        }
    }
}
// Generated from operation watchCoreV1PodTemplateForAllNamespaces
impl PodTemplate {
    /// List or watch objects of kind PodTemplate across all namespaces.
    ///
    /// This operation only supports watching one item, or a list of items, of this type for changes.
    ///
    /// Use the returned [`crate::ResponseBody`]`<`[`crate::WatchResponse`]`<Self>>` constructor, or [`crate::WatchResponse`]`<Self>` directly, to parse the HTTP response.
    ///
    /// # Arguments
    ///
    /// * `optional` - optional parameters; use `Default::default()` to not pass any
    #[cfg(feature = "api")]
    pub fn watch_pod_template_for_all_namespaces(
        optional: crate::WatchOptional<'_>,
    ) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<crate::WatchResponse<Self>>), crate::RequestError> {
        // No namespace segment: the cluster-wide collection endpoint.
        let mut query_pairs = crate::url::form_urlencoded::Serializer::new("/api/v1/podtemplates?".to_owned());
        optional.__serialize(&mut query_pairs);
        let url = query_pairs.finish();
        match http::Request::get(url).body(Vec::new()) {
            Ok(request) => Ok((request, crate::ResponseBody::new)),
            Err(err) => Err(crate::RequestError::Http(err)),
        }
    }
}
// End /v1/PodTemplate
// Identifies this type to the crate's generic request/response machinery.
impl crate::Resource for PodTemplate {
    const API_VERSION: &'static str = "v1";
    const GROUP: &'static str = "";
    const KIND: &'static str = "PodTemplate";
    const VERSION: &'static str = "v1";
}
// The kind reported by the corresponding list endpoints ("PodTemplateList").
impl crate::ListableResource for PodTemplate {
    const LIST_KIND: &'static str = concat!("PodTemplate", "List");
}
// Exposes the object's standard Kubernetes metadata, when present.
impl crate::Metadata for PodTemplate {
    type Ty = crate::apimachinery::pkg::apis::meta::v1::ObjectMeta;
    fn metadata(&self) -> Option<&<Self as crate::Metadata>::Ty> {
        self.metadata.as_ref()
    }
}
impl<'de> serde::Deserialize<'de> for PodTemplate {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
        // Map keys this struct recognizes; anything else falls into `Other`.
        #[allow(non_camel_case_types)]
        enum Field {
            Key_api_version,
            Key_kind,
            Key_metadata,
            Key_template,
            Other,
        }
        impl<'de> serde::Deserialize<'de> for Field {
            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
                struct Visitor;
                impl<'de> serde::de::Visitor<'de> for Visitor {
                    type Value = Field;
                    fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        f.write_str("field identifier")
                    }
                    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error {
                        Ok(match v {
                            "apiVersion" => Field::Key_api_version,
                            "kind" => Field::Key_kind,
                            "metadata" => Field::Key_metadata,
                            "template" => Field::Key_template,
                            _ => Field::Other,
                        })
                    }
                }
                deserializer.deserialize_identifier(Visitor)
            }
        }
        struct Visitor;
        impl<'de> serde::de::Visitor<'de> for Visitor {
            type Value = PodTemplate;
            fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                f.write_str(<Self::Value as crate::Resource>::KIND)
            }
            fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: serde::de::MapAccess<'de> {
                let mut value_metadata: Option<crate::apimachinery::pkg::apis::meta::v1::ObjectMeta> = None;
                let mut value_template: Option<crate::api::core::v1::PodTemplateSpec> = None;
                while let Some(key) = serde::de::MapAccess::next_key::<Field>(&mut map)? {
                    match key {
                        // "apiVersion" and "kind" are validated rather than stored:
                        // they must match the constants on the Resource impl.
                        Field::Key_api_version => {
                            let value_api_version: String = serde::de::MapAccess::next_value(&mut map)?;
                            if value_api_version != <Self::Value as crate::Resource>::API_VERSION {
                                return Err(serde::de::Error::invalid_value(serde::de::Unexpected::Str(&value_api_version), &<Self::Value as crate::Resource>::API_VERSION));
                            }
                        },
                        Field::Key_kind => {
                            let value_kind: String = serde::de::MapAccess::next_value(&mut map)?;
                            if value_kind != <Self::Value as crate::Resource>::KIND {
                                return Err(serde::de::Error::invalid_value(serde::de::Unexpected::Str(&value_kind), &<Self::Value as crate::Resource>::KIND));
                            }
                        },
                        Field::Key_metadata => value_metadata = serde::de::MapAccess::next_value(&mut map)?,
                        Field::Key_template => value_template = serde::de::MapAccess::next_value(&mut map)?,
                        // Unknown keys are consumed and discarded.
                        Field::Other => { let _: serde::de::IgnoredAny = serde::de::MapAccess::next_value(&mut map)?; },
                    }
                }
                // Both fields are optional; missing keys simply stay `None`.
                Ok(PodTemplate {
                    metadata: value_metadata,
                    template: value_template,
                })
            }
        }
        deserializer.deserialize_struct(
            <Self as crate::Resource>::KIND,
            &[
                "apiVersion",
                "kind",
                "metadata",
                "template",
            ],
            Visitor,
        )
    }
}
impl serde::Serialize for PodTemplate {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer {
        // Field count: "apiVersion" and "kind" are always emitted; the two
        // optional fields are counted only when present.
        let mut state = serializer.serialize_struct(
            <Self as crate::Resource>::KIND,
            2 +
            self.metadata.as_ref().map_or(0, |_| 1) +
            self.template.as_ref().map_or(0, |_| 1),
        )?;
        serde::ser::SerializeStruct::serialize_field(&mut state, "apiVersion", <Self as crate::Resource>::API_VERSION)?;
        serde::ser::SerializeStruct::serialize_field(&mut state, "kind", <Self as crate::Resource>::KIND)?;
        if let Some(value) = &self.metadata {
            serde::ser::SerializeStruct::serialize_field(&mut state, "metadata", value)?;
        }
        if let Some(value) = &self.template {
            serde::ser::SerializeStruct::serialize_field(&mut state, "template", value)?;
        }
        serde::ser::SerializeStruct::end(state)
    }
}
| {
let __url = "/api/v1/podtemplates?".to_owned();
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
} |
Home.js | import React from 'react';
import HomeComp from '../components/HomeComp';
import NavTabs from '../components/NavTabs';
function | () {
return (
<div>
<NavTabs />
<HomeComp />
</div>
)
};
export default Home; | Home |
ListTagsController.ts | import { Request, Response } from "express";
import { ListTagService } from "../services/ListTagService";
|
async handle(request: Request, response: Response){
const listTagService = new ListTagService()
const tags = await listTagService.execute()
return response.json(tags)
}
} |
export class ListTagController{
|
parser-utils.ts | import { SourceFile } from "ts-simple-ast";
export function | (sourceFile: SourceFile, componentName: string) {
return sourceFile.getClass(componentName) || sourceFile.getFunction(componentName);
}; | getComponent |
V5_sni_hostname_migration.go | package migration
import (
"code.cloudfoundry.org/routing-api/db"
"code.cloudfoundry.org/routing-api/models"
)
// V5SniHostnameMigration is routing-api schema migration number 5 (see Run).
type V5SniHostnameMigration struct{}

// Compile-time assertion that V5SniHostnameMigration implements Migration.
var _ Migration = new(V5SniHostnameMigration)
func | () *V5SniHostnameMigration {
return &V5SniHostnameMigration{}
}
// Version returns the schema version number this migration produces.
func (v *V5SniHostnameMigration) Version() int {
	return 5
}
// Run drops the idx_tcp_route index and re-runs GORM auto-migration for
// models.TcpRouteMapping.
// NOTE(review): presumably the auto-migrate picks up a new SNI-hostname
// field on TcpRouteMapping — confirm against the model definition.
func (v *V5SniHostnameMigration) Run(sqlDB *db.SqlDB) error {
	// The old unique index must be removed before the schema is rebuilt.
	_, err := sqlDB.Client.Model(&models.TcpRouteMapping{}).RemoveIndex("idx_tcp_route")
	if err != nil {
		return err
	}
	err = sqlDB.Client.AutoMigrate(&models.TcpRouteMapping{})
	if err != nil {
		return err
	}
	return err
}
| NewV5SniHostnameMigration |
lib.rs | #[cfg(test)]
mod tests {
#[test]
fn it_works() {
assert_eq!(2 + 2, 4);
}
}
extern crate convert_case;
extern crate proc_macro;
extern crate proc_macro_error;
use crate::root::get_default_route;
use crate::routing::routing_variant_snippets;
use proc_macro::TokenStream;
use crate::routing_modules::{module_init_snippets, modules_path, modules_snippets};
use proc_macro_error::{abort, proc_macro_error, Diagnostic, Level};
use quote::quote;
use syn::{
export::TokenStream2, parse::Result, parse_macro_input, Attribute, Data, DataEnum, DeriveInput,
Error, Field, Fields, Ident, Lit, LitStr, Meta, MetaNameValue, Variant,
};
mod guard;
mod root;
mod routing;
mod routing_modules;
/// Derive an enum as Routing for navigation
/// You can change the value of a path for a given route this way
///
///
///
/// ```rust
///
/// #[derive(Debug, PartialEq, Copy, Clone, AsUrl)]
/// pub enum DashboardAdminRoutes {
/// #[as_path = "my_stuff"] // "/my_stuff"
/// Other,
/// #[as_path = ""]
/// Root, // "/"
/// }
///
///
/// fn test_url() {
/// let mut query_search: IndexMap<String, String> = IndexMap::new();
///
/// query_search.insert("user".to_string(), "arn".to_string());
/// query_search.insert("role".to_string(), "baby_programmer".to_string());
/// query_search.insert("location".to_string(), "norway".to_string());
/// let url = ExampleRoutes::Admin {
/// query: query_search.clone(),
/// }
/// .to_url();
/// let url_to_compare: Url = "/admin?user=arn&role=baby_programmer&location=norway"
/// .parse()
/// .unwrap();
/// assert_eq!(url, url_to_compare);
/// }
///
/// ```
///
#[proc_macro_error]
#[proc_macro_derive(AsUrl, attributes(as_path))]
pub fn derive_as_url(item: TokenStream) -> TokenStream {
let DeriveInput { ident, data, .. } = parse_macro_input!(item as DeriveInput);
let variants = match data {
Data::Enum(data) => data.variants,
_ => abort!(Diagnostic::new(
Level::Error,
"Can only derive AsPath for enums.".into()
)),
};
let variants = variants.iter();
let (as_snippets, parse_snippets) = routing_variant_snippets(variants.clone());
let name = ident.to_string();
TokenStream::from(quote! {
impl router::Navigation for #ident {
fn to_url(&self) -> Url {
let url : Url = match self {
#(#as_snippets),*
}.parse().unwrap();
url
}
fn from_url(url: Url) -> std::result::Result<Self, ParseError>
where
Self: Sized + ParsePath {
let string_url = url.to_string();
Self::parse_path(&string_url)
}
}
impl AsPath for #ident {
fn as_path(self) -> String {
match self {
#(#as_snippets),*
}
}
}
impl router::ParsePath for #ident {
fn parse_path(path: &str) -> std::result::Result<Self, ParseError> {
let next = path.trim_start_matches("/");
Err(ParseError::NoMatch)
#(.or_else(|err|
#parse_snippets
)
)*
.map_err(|err| ParseError::By(#name.to_string(), Box::new(err)))
}
}
})
}
/// Extracts the string literal from a `#[attribute_name = "..."]` attribute.
///
/// Returns `Ok(None)` when `attr` is some other attribute, `Ok(Some(lit))`
/// on a match, and an error when the attribute exists but is not of the
/// `name = "string"` form.
fn get_string_from_attribute(attribute_name: &str, attr: &Attribute) -> Result<Option<LitStr>> {
    if !attr.path.is_ident(attribute_name) {
        return Ok(None); // not our attribute
    }
    match attr.parse_meta()? {
        Meta::NameValue(MetaNameValue {
            lit: Lit::Str(name),
            ..
        }) => Some(Some(name)),
        _ => None,
    }
    .ok_or_else(|| Error::new_spanned(attr, &format!("expected #[{} = \"...\"]", attribute_name)))
}
/// Rebuild the content of a variant depending of the fields present in the original enum
fn | (structs_tuple: (Option<&Field>, Option<&Field>, Option<&Field>)) -> TokenStream2 {
match structs_tuple {
(id, query, children) if id.is_some() && query.is_some() && children.is_some() => {
quote! { id,query,children}
}
(id, query, _) if id.is_some() && query.is_some() => {
quote! { id, query}
}
(id, query, children) if id.is_none() && query.is_some() && children.is_some() => {
quote! { query , children}
}
(id, query, children) if id.is_some() && children.is_some() && query.is_none() => {
quote! { id, children }
}
(id, query, children) if id.is_some() && query.is_none() && children.is_none() => {
quote! { id }
}
(id, query, children) if query.is_some() && id.is_none() && children.is_none() => {
quote! { query}
}
(id, query, children) if query.is_none() && id.is_none() & children.is_some() => {
quote! { children }
}
(id, query, children) if query.is_none() && id.is_none() & children.is_none() => {
quote! {}
}
(_, _, _) => {
quote! {}
}
}
}
/// Assign only the payload defined by the field in the enu,
fn build_advanced(structs_tuple: (Option<&Field>, Option<&Field>, Option<&Field>)) -> TokenStream2 {
match structs_tuple {
(id, query, children) if id.is_some() && query.is_some() && children.is_some() => {
let sub_enum = &children.clone().unwrap().ty;
quote! { id : id.unwrap(),query : query.unwrap(),children : #sub_enum::parse_path(&children.unwrap()).unwrap()}
}
(id, query, _) if id.is_some() && query.is_some() => {
quote! { id : id.unwrap(),query : query.unwrap()}
}
(id, query, children) if id.is_none() && query.is_some() && children.is_some() => {
let sub_enum = &children.clone().unwrap().ty;
quote! { query : query.unwrap(),children : #sub_enum::parse_path(&children.unwrap()).unwrap()}
}
(id, query, children) if id.is_some() && children.is_some() && query.is_none() => {
let sub_enum = &children.clone().unwrap().ty;
quote! { id : id.unwrap(),children : #sub_enum::parse_path(&children.unwrap()).unwrap()}
}
(id, query, children) if id.is_some() && query.is_none() && children.is_none() => {
quote! { id : id.unwrap()}
}
(id, query, children) if query.is_some() && id.is_none() && children.is_none() => {
quote! { query : query.unwrap()}
}
(id, query, children) if query.is_none() && id.is_none() & children.is_some() => {
let sub_enum = &children.clone().unwrap().ty;
quote! { children :#sub_enum::parse_path(&children.unwrap().clone()).unwrap()}
}
(_, _, _) => {
quote! {}
}
}
}
/// Same field-presence logic as the token-stream builders, but rendered as a
/// plain comma-separated string ("id,query,children" down to "").
fn build_string_payload(structs_tuple: (Option<&Field>, Option<&Field>, Option<&Field>)) -> String {
    let (id, query, children) = structs_tuple;
    // Collect the names of the fields that are present, in fixed order,
    // then join them — equivalent to enumerating all eight combinations.
    let mut parts: Vec<&str> = Vec::with_capacity(3);
    if id.is_some() {
        parts.push("id");
    }
    if query.is_some() {
        parts.push("query");
    }
    if children.is_some() {
        parts.push("children");
    }
    parts.join(",")
}
/// Define a routing config as root for your navigation.
/// It will contain the default route used by the router when it cannot find the right url
/// ```rust
///
/// #[derive(Debug, PartialEq, Copy, Clone, Root)]
/// pub enum DashboardAdminRoutes {
/// #[default_route]
/// NotFound, // -> /blablablalbla -> /not_found
/// Root,
/// }
/// ```
///
#[proc_macro_error]
#[proc_macro_derive(Root, attributes(default_route))]
pub fn define_as_root(item: TokenStream) -> TokenStream {
let DeriveInput { ident, data, .. } = parse_macro_input!(item as DeriveInput);
let variants = match data {
Data::Enum(data) => data.variants,
_ => abort!(Diagnostic::new(
Level::Error,
"Can only derive AsPath for enums.".into()
)),
};
let variants = variants.iter();
let default_route = get_default_route(variants.clone());
if default_route.is_err() {
abort!(Diagnostic::new(
Level::Error,
"Could not find default_route".into()
))
}
let default_variant = default_route.unwrap();
match default_variant.fields {
Fields::Named(_) => abort!(Diagnostic::new(
Level::Error,
"Default route need to be simple".into()
)),
Fields::Unnamed(_) => abort!(Diagnostic::new(
Level::Error,
"Default route need to be simple".into()
)),
Fields::Unit => {}
}
let default_variant_ident = default_variant.ident;
TokenStream::from(quote! {
impl Default for #ident {
fn default() -> #ident {
#ident::#default_variant_ident
}
}
})
}
/// The RoutingModule makes the enum variants representing modules loaded by the routes
/// By default, an enum variant snake case is equal to its module name
///
/// You can rename the path
/// You can specify routes that does not load module ( no init, no specific Model & Msg and no view )
///
/// The derive macro will call the init function , Model, Msg, Routes, Update, and View
///
/// Todo :
/// - Could add as_module
/// - Could generate the code for fn update as well ?
/// ```rust
///
///
///
/// #[derive(Debug, PartialEq, Clone, RoutingModules)]
/// pub enum ExampleRoutes {
/// // #[as_module= "my_stuff"] // the module is name my_stuff.rs
/// Other {
/// id: String,
/// children: Settings,
/// },
/// #[guard = "logged_user => admin_guard => not_authorized_view"]
/// Admin { // will load module "admin.rs"
/// // will load model.admin and as well
/// // equal to
/// // #[model_scope = "admin => admin ::init"] will check init has correct arguments
/// // #[view_scope = "admin => admin::view"] will check viewt has correct arguments
/// query: IndexMap<String, String>,
/// },
/// #[guard = "logged_user => user_guard => not_logged_user_view"]
/// Dashboard(DashboardRoutes), // will load module "dashboard"
/// Profile { // will load module "profile"
/// id: String,
/// },
/// #[guard = "logged_user => admin_guard => not_authorized_view"]
/// #[view = " => my_stuff"]
/// MyStuff,
/// #[view = " => not_found"]
/// #[default_route]
/// NotFound,
/// #[view = " => home"]
/// #[as_path = ""]
/// Root,
/// }
///
/// fn view(model: &Model) -> impl IntoNodes<Msg> {
/// vec![
/// header(&model),
/// if let Some(route) = &model.router.current_route {
/// route.view(model)
/// } else {
/// home(&model.theme)
/// },
/// ]
/// }
///
/// ```
///
///
#[proc_macro_error]
#[proc_macro_derive(
RoutingModules,
attributes(as_path, view, guard, default_route, modules_path)
)]
pub fn derive_add_module_load(item: TokenStream) -> TokenStream {
let add_url = derive_as_url(item.clone());
let root = define_as_root(item.clone());
let DeriveInput {
ident, data, attrs, ..
} = parse_macro_input!(item as DeriveInput);
let variants = match data {
Data::Enum(data) => data.variants,
_ => abort!(Diagnostic::new(
Level::Error,
"Can only derive AsPath for enums.".into()
)),
};
let url_impl = TokenStream2::from(add_url);
let default_route_impl = TokenStream2::from(root);
let variants = variants.iter();
let modules_path = modules_path(ident.clone(), attrs.iter());
let modules_snippets = modules_snippets(variants.clone(), modules_path.clone());
let init_snippets = module_init_snippets(variants.clone(), modules_path.clone());
TokenStream::from(quote! {
#url_impl
#default_route_impl
impl router::View<#ident, Model, Msg> for #ident {
fn view(&self, scoped_state: &Model) -> Node<Msg> {
match self {
#(#modules_snippets),*
}
}
}
impl router::Init<#ident, Model, Msg> for #ident {
fn init<'b, 'c>(
&self,
previous_state: &'b mut Model,
orders: &'c mut impl Orders<Msg>,
) -> &'b mut Model {
match self {
#(#init_snippets),*
}
previous_state
}
}
})
}
| build_structs |
test_for_daysBetweenDates.py | def nextDay(year, month, day):
"""Simple version: assume every month has 30 days"""
if day < 30:
return year, month, day + 1
else:
if month == 12:
return year + 1, 1, 1
else:
return year, month + 1, 1
def dateIsBefore(year1, month1, day1, year2, month2, day2):
    """Returns True if year1-month1-day1 is strictly before
    year2-month2-day2. Otherwise, returns False.

    (Function name restored from the FIM middle token "dateIsBefore";
    daysBetweenDates below calls it under this name.)
    """
    if year1 < year2:
        return True
    if year1 == year2:
        # Same year: compare months, then days.
        if month1 < month2:
            return True
        if month1 == month2:
            return day1 < day2
    return False
def daysBetweenDates(year1, month1, day1, year2, month2, day2):
    """Returns the number of days between year1/month1/day1
    and year2/month2/day2. Assumes inputs are valid dates
    in Gregorian calendar."""
    # Program defensively: reject an end date earlier than the start date.
    # The previous per-component asserts (month2 >= month1, day2 >= day1)
    # wrongly rejected valid spans that cross a month or year boundary,
    # e.g. 2012-12-31 .. 2013-01-01.
    assert not dateIsBefore(year2, month2, day2, year1, month1, day1)
    days = 0
    # Step one day at a time until the start date reaches the end date.
    while dateIsBefore(year1, month1, day1, year2, month2, day2):
        year1, month1, day1 = nextDay(year1, month1, day1)
        days += 1
    return days
def test():
    """Runs daysBetweenDates against known-good cases, including one whose
    end date precedes its start date and therefore must raise AssertionError."""
    # Each entry is ((y1, m1, d1, y2, m2, d2), expected_result_or_marker).
    test_cases = [((2012,9,30,2012,10,30),30),
                  ((2012,1,1,2013,1,1),360),
                  ((2012,9,1,2012,9,4),3),
                  ((2013,1,1,1999,12,31), "AssertionError")]
    for (args, answer) in test_cases:
        try:
            result = daysBetweenDates(*args)
            if result == answer and answer != "AssertionError":
                print ("Test case passed!")
            else:
                print ("Test with data:", args, "failed")
        except AssertionError:
            # An AssertionError only counts as success when it was expected.
            if answer == "AssertionError":
                print ("Nice job! Test case {0} correctly raises AssertionError!\n".format(args))
            else:
                print ("Check your work! Test case {0} should not raise AssertionError!\n".format(args))
test() | dateIsBefore |
BezierCurve.js | "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BezierCurve = exports.BezierCurvepPoints = void 0;
const Point_1 = require("./Point");
const Polygon_1 = require("./Polygon");
// Indices of the four control points inside the curve's point array
// (compiled TypeScript enum pattern).
var BezierCurvepPoints;
(function (BezierCurvepPoints) {
    BezierCurvepPoints[BezierCurvepPoints["SOURCE"] = 0] = "SOURCE";
    BezierCurvepPoints[BezierCurvepPoints["SOURCE_CONTROL"] = 1] = "SOURCE_CONTROL";
    BezierCurvepPoints[BezierCurvepPoints["TARGET_CONTROL"] = 2] = "TARGET_CONTROL";
    BezierCurvepPoints[BezierCurvepPoints["TARGET"] = 3] = "TARGET";
})(BezierCurvepPoints = exports.BezierCurvepPoints || (exports.BezierCurvepPoints = {}));
// Cubic bezier curve: a 4-point polygon (source, two control points, target)
// that can render itself as an SVG path command.
// (Class name restored from the FIM hole; `exports.BezierCurve = BezierCurve`
// below pins it. Also fixed the "extactly" typo in the error message.)
class BezierCurve extends Polygon_1.Polygon {
    constructor() {
        super([new Point_1.Point(0, 0), new Point_1.Point(0, 0), new Point_1.Point(0, 0), new Point_1.Point(0, 0)]);
    }
    // "M <source> C <srcCtrl>, <tgtCtrl>, <target>" — an SVG cubic bezier.
    getSVGCurve() {
        return `M${this.getSource().toSVG()} C${this.getSourceControl().toSVG()}, ${this.getTargetControl().toSVG()}, ${this.getTarget().toSVG()}`;
    }
    setPoints(points) {
        if (points.length !== 4) {
            throw new Error('BezierCurve must have exactly 4 points');
        }
        super.setPoints(points);
    }
    getSource() {
        return this.points[BezierCurvepPoints.SOURCE];
    }
    getSourceControl() {
        return this.points[BezierCurvepPoints.SOURCE_CONTROL];
    }
    getTargetControl() {
        return this.points[BezierCurvepPoints.TARGET_CONTROL];
    }
    getTarget() {
        return this.points[BezierCurvepPoints.TARGET];
    }
    setSource(point) {
        this.points[BezierCurvepPoints.SOURCE] = point;
    }
    setSourceControl(point) {
        this.points[BezierCurvepPoints.SOURCE_CONTROL] = point;
    }
    setTargetControl(point) {
        this.points[BezierCurvepPoints.TARGET_CONTROL] = point;
    }
    setTarget(point) {
        this.points[BezierCurvepPoints.TARGET] = point;
    }
}
exports.BezierCurve = BezierCurve;
//# sourceMappingURL=BezierCurve.js.map | BezierCurve |
version.rs | // Copyright (c) Microsoft. All rights reserved.
use std::fmt;
use std::str::FromStr;
pub const API_VERSION: Version = Version::Version2019_01_30;
/// Supported API versions. Variants are declared oldest-first, so the derived
/// `PartialOrd` compares versions by age.
/// (Enum name restored from the FIM hole; the `FromStr`/`Display` impls and
/// `API_VERSION` constant all refer to `Version`.)
#[derive(Clone, Copy, Debug, PartialOrd, PartialEq)]
pub enum Version {
    Version2018_06_28,
    Version2019_01_30,
}
impl FromStr for Version {
    type Err = ();
    /// Parses a wire-format date string (e.g. "2019-01-30") into a `Version`.
    /// Unknown strings yield `Err(())`.
    fn from_str(s: &str) -> Result<Version, ()> {
        match s {
            "2018-06-28" => Ok(Version::Version2018_06_28),
            "2019-01-30" => Ok(Version::Version2019_01_30),
            _ => Err(()),
        }
    }
}
impl fmt::Display for Version {
    // Renders the version as its wire-format date string (inverse of `from_str`).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            Version::Version2018_06_28 => write!(f, "2018-06-28"),
            Version::Version2019_01_30 => write!(f, "2019-01-30"),
        }
    }
}
| Version |
users.service.ts | import { HttpException, HttpStatus, Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository } from 'typeorm';
import CreateUserDto from './dto/createUser.dto';
import User from './users.entity';
@Injectable()
export class | {
constructor(
@InjectRepository(User)
private usersRepository: Repository<User>,
) {}
async getByEmail(email: string) {
const user = await this.usersRepository.findOne({ email });
if (user) return user;
throw new HttpException(
'User with this email does not exist',
HttpStatus.NOT_FOUND,
);
}
async getById(id: number) {
const user = await this.usersRepository.findOne({ id });
if (user) {
return user;
}
throw new HttpException(
'User with this id does not exist',
HttpStatus.NOT_FOUND,
);
}
async create(userData: CreateUserDto) {
const newUser = await this.usersRepository.create(userData);
await this.usersRepository.save(newUser);
return newUser;
}
}
| UsersService |
test_scrape.py | """
ScrapeService API Tests
"""
# pylint: disable=protected-access,missing-class-docstring,unidiomatic-typecheck
# stdlib
import unittest
# library
import pytest
# module
from avwx import exceptions, service
# tests
from .test_base import BaseTestService
class TestStationScrape(BaseTestService):
service_class = service.scrape.StationScrape
report_type = "metar"
required_attrs = ("method", "_strip_whitespace", "_extract")
def test_service(self):
"""Tests for expected values and method implementation"""
# pylint: disable=no-member
if type(self.serv) == service.scrape.StationScrape:
self.assertIsNone(self.serv.url)
else:
self.assertIsInstance(self.serv.url, str)
self.assertIsInstance(self.serv.method, str)
self.assertIn(self.serv.method, ("GET", "POST"))
def test_make_err(self):
"""Tests that InvalidRequest exceptions are generated with the right message"""
# pylint: disable=no-member
key, msg = "test_key", "testing"
err = self.serv._make_err(msg, key)
err_str = (
f"Could not find {key} in {self.serv.__class__.__name__} response\n{msg}"
)
self.assertIsInstance(err, exceptions.InvalidRequest)
self.assertEqual(err.args, (err_str,))
self.assertEqual(str(err), err_str)
def test_fetch_exceptions(self):
"""Tests fetch exception handling"""
for station in ("12K", "MAYT"):
with self.assertRaises(exceptions.BadStation):
self.serv.fetch(station) # pylint: disable=no-member
# Should raise exception due to empty url
if type(self.serv) == service.scrape.ScrapeService:
with self.assertRaises(NotImplementedError):
self.serv.fetch("KJFK") # pylint: disable=no-member
@pytest.mark.asyncio
async def test_async_fetch_exceptions(self):
"""Tests async fetch exception handling"""
for station in ("12K", "MAYT"):
with self.assertRaises(exceptions.BadStation):
await self.serv.async_fetch(station) # pylint: disable=no-member
# Should raise exception due to empty url
if type(self.serv) == service.scrape.ScrapeService:
with self.assertRaises(NotImplementedError):
await self.serv.async_fetch("KJFK") # pylint: disable=no-member
class TestNOAA(TestStationScrape):
service_class = service.NOAA
stations = ["KJFK", "EGLL", "PHNL"]
class TestAMO(TestStationScrape):
service_class = service.AMO
stations = ["RKSI", "RKSS", "RKNY"]
# class TestMAC(TestStationScrape):
# service_class = service.MAC
# stations = ["SKBO"]
class TestAUBOM(TestStationScrape):
service_class = service.AUBOM
stations = ["YBBN", "YSSY", "YCNK"]
class TestOLBS(TestStationScrape):
service_class = service.OLBS
stations = ["VAPO", "VEGT"]
class TestNAM(TestStationScrape):
|
class TestAVT(TestStationScrape):
service_class = service.AVT
stations = ["ZJQH", "ZYCC", "ZSWZ"]
class TestModule(unittest.TestCase):
def test_get_service(self):
"""Tests that the correct service class is returned"""
for stations, country, serv in (
(("KJFK", "PHNL"), "US", service.NOAA),
(("EGLL",), "GB", service.NOAA),
(("RKSI",), "KR", service.AMO),
# (("SKBO", "SKPP"), "CO", service.MAC),
(("YWOL", "YSSY"), "AU", service.AUBOM),
(("VAPO", "VEGT"), "IN", service.OLBS),
(("ZJQH", "ZYCC", "ZSWZ"), "CN", service.AVT),
):
for station in stations:
self.assertIsInstance(
service.get_service(station, country)("metar"), serv
)
| service_class = service.NAM
stations = ["EHAM", "ENGM", "BIRK"] |
dict_conv.py | # -*- coding: utf-8 -*-
# dict_conv.py (Python3 script)
import sys
ENC_UTF16_BE = 1
ENC_UTF16_LE = 2
def add_char(enc, s, c):
if enc == ENC_UTF16_BE:
s += "\\x00"
s += c
if enc == ENC_UTF16_LE:
s += "\\x00"
return s
def conv(enc, s):
n = len(s)
r = ""
i = 0
while i < n:
c = s[i]
if c == '\\':
c = s[i+1]
if c == '\\' or c == '"':
r = add_char(enc, r, "\\" + c)
i += 2
continue
else:
raise("Unknown escape {0}".format(s))
r = add_char(enc, r, c)
i += 1
return r
def | (enc):
print("# This file was generated by dict_conv.py.")
for line in sys.stdin:
s = line.strip()
if s[0] == '#':
print(s)
continue
if s[0] == '"' and s[-1] == '"':
s = conv(enc, s[1:-1])
print("\"{0}\"".format(s))
else:
raise("Invalid format {0}".format(s))
def usage(argv):
raise RuntimeError("Usage: python {0} utf16_be/utf16_le".format(argv[0]))
if __name__ == "__main__":
argv = sys.argv
argc = len(argv)
if argc >= 2:
s = argv[1]
if s == 'utf16_be':
enc = ENC_UTF16_BE
elif s == 'utf16_le':
enc = ENC_UTF16_LE
else:
usage(argv)
else:
usage(argv)
main(enc)
| main |
test.py | import sys
def run(): |
if len(line) == 0:
stop = True
else:
print line + "!?!?"
sys.stdout.flush()
run() | stop = False
while not stop:
line = sys.stdin.readline().strip() |
approx.py | import operator
from decimal import Decimal
from fractions import Fraction
from operator import eq
from operator import ne
import pytest
from pytest import approx
inf, nan = float("inf"), float("nan")
@pytest.fixture
def mocked_doctest_runner(monkeypatch):
import doctest
class MockedPdb:
def __init__(self, out):
pass
def set_trace(self):
raise NotImplementedError("not used")
def reset(self):
pass
def set_continue(self):
pass
monkeypatch.setattr("doctest._OutputRedirectingPdb", MockedPdb) |
class MyDocTestRunner(doctest.DocTestRunner):
def report_failure(self, out, test, example, got):
raise AssertionError(
"'{}' evaluates to '{}', not '{}'".format(
example.source.strip(), got.strip(), example.want.strip()
)
)
return MyDocTestRunner()
class TestApprox:
def test_repr_string(self):
assert repr(approx(1.0)) == "1.0 ± 1.0e-06"
assert repr(approx([1.0, 2.0])) == "approx([1.0 ± 1.0e-06, 2.0 ± 2.0e-06])"
assert repr(approx((1.0, 2.0))) == "approx((1.0 ± 1.0e-06, 2.0 ± 2.0e-06))"
assert repr(approx(inf)) == "inf"
assert repr(approx(1.0, rel=nan)) == "1.0 ± ???"
assert repr(approx(1.0, rel=inf)) == "1.0 ± inf"
# Dictionaries aren't ordered, so we need to check both orders.
assert repr(approx({"a": 1.0, "b": 2.0})) in (
"approx({'a': 1.0 ± 1.0e-06, 'b': 2.0 ± 2.0e-06})",
"approx({'b': 2.0 ± 2.0e-06, 'a': 1.0 ± 1.0e-06})",
)
def test_repr_complex_numbers(self):
assert repr(approx(inf + 1j)) == "(inf+1j)"
assert repr(approx(1.0j, rel=inf)) == "1j ± inf"
# can't compute a sensible tolerance
assert repr(approx(nan + 1j)) == "(nan+1j) ± ???"
assert repr(approx(1.0j)) == "1j ± 1.0e-06 ∠ ±180°"
# relative tolerance is scaled to |3+4j| = 5
assert repr(approx(3 + 4 * 1j)) == "(3+4j) ± 5.0e-06 ∠ ±180°"
# absolute tolerance is not scaled
assert repr(approx(3.3 + 4.4 * 1j, abs=0.02)) == "(3.3+4.4j) ± 2.0e-02 ∠ ±180°"
@pytest.mark.parametrize(
"value, expected_repr_string",
[
(5.0, "approx(5.0 ± 5.0e-06)"),
([5.0], "approx([5.0 ± 5.0e-06])"),
([[5.0]], "approx([[5.0 ± 5.0e-06]])"),
([[5.0, 6.0]], "approx([[5.0 ± 5.0e-06, 6.0 ± 6.0e-06]])"),
([[5.0], [6.0]], "approx([[5.0 ± 5.0e-06], [6.0 ± 6.0e-06]])"),
],
)
def test_repr_nd_array(self, value, expected_repr_string):
"""Make sure that arrays of all different dimensions are repr'd correctly."""
np = pytest.importorskip("numpy")
np_array = np.array(value)
assert repr(approx(np_array)) == expected_repr_string
def test_operator_overloading(self):
assert 1 == approx(1, rel=1e-6, abs=1e-12)
assert not (1 != approx(1, rel=1e-6, abs=1e-12))
assert 10 != approx(1, rel=1e-6, abs=1e-12)
assert not (10 == approx(1, rel=1e-6, abs=1e-12))
def test_exactly_equal(self):
examples = [
(2.0, 2.0),
(0.1e200, 0.1e200),
(1.123e-300, 1.123e-300),
(12345, 12345.0),
(0.0, -0.0),
(345678, 345678),
(Decimal("1.0001"), Decimal("1.0001")),
(Fraction(1, 3), Fraction(-1, -3)),
]
for a, x in examples:
assert a == approx(x)
def test_opposite_sign(self):
examples = [(eq, 1e-100, -1e-100), (ne, 1e100, -1e100)]
for op, a, x in examples:
assert op(a, approx(x))
def test_zero_tolerance(self):
within_1e10 = [(1.1e-100, 1e-100), (-1.1e-100, -1e-100)]
for a, x in within_1e10:
assert x == approx(x, rel=0.0, abs=0.0)
assert a != approx(x, rel=0.0, abs=0.0)
assert a == approx(x, rel=0.0, abs=5e-101)
assert a != approx(x, rel=0.0, abs=5e-102)
assert a == approx(x, rel=5e-1, abs=0.0)
assert a != approx(x, rel=5e-2, abs=0.0)
def test_negative_tolerance(self):
# Negative tolerances are not allowed.
illegal_kwargs = [
dict(rel=-1e100),
dict(abs=-1e100),
dict(rel=1e100, abs=-1e100),
dict(rel=-1e100, abs=1e100),
dict(rel=-1e100, abs=-1e100),
]
for kwargs in illegal_kwargs:
with pytest.raises(ValueError):
1.1 == approx(1, **kwargs)
def test_inf_tolerance(self):
# Everything should be equal if the tolerance is infinite.
large_diffs = [(1, 1000), (1e-50, 1e50), (-1.0, -1e300), (0.0, 10)]
for a, x in large_diffs:
assert a != approx(x, rel=0.0, abs=0.0)
assert a == approx(x, rel=inf, abs=0.0)
assert a == approx(x, rel=0.0, abs=inf)
assert a == approx(x, rel=inf, abs=inf)
def test_inf_tolerance_expecting_zero(self):
# If the relative tolerance is zero but the expected value is infinite,
# the actual tolerance is a NaN, which should be an error.
illegal_kwargs = [dict(rel=inf, abs=0.0), dict(rel=inf, abs=inf)]
for kwargs in illegal_kwargs:
with pytest.raises(ValueError):
1 == approx(0, **kwargs)
def test_nan_tolerance(self):
illegal_kwargs = [dict(rel=nan), dict(abs=nan), dict(rel=nan, abs=nan)]
for kwargs in illegal_kwargs:
with pytest.raises(ValueError):
1.1 == approx(1, **kwargs)
def test_reasonable_defaults(self):
# Whatever the defaults are, they should work for numbers close to 1
# than have a small amount of floating-point error.
assert 0.1 + 0.2 == approx(0.3)
def test_default_tolerances(self):
# This tests the defaults as they are currently set. If you change the
# defaults, this test will fail but you should feel free to change it.
# None of the other tests (except the doctests) should be affected by
# the choice of defaults.
examples = [
# Relative tolerance used.
(eq, 1e100 + 1e94, 1e100),
(ne, 1e100 + 2e94, 1e100),
(eq, 1e0 + 1e-6, 1e0),
(ne, 1e0 + 2e-6, 1e0),
# Absolute tolerance used.
(eq, 1e-100, +1e-106),
(eq, 1e-100, +2e-106),
(eq, 1e-100, 0),
]
for op, a, x in examples:
assert op(a, approx(x))
def test_custom_tolerances(self):
assert 1e8 + 1e0 == approx(1e8, rel=5e-8, abs=5e0)
assert 1e8 + 1e0 == approx(1e8, rel=5e-9, abs=5e0)
assert 1e8 + 1e0 == approx(1e8, rel=5e-8, abs=5e-1)
assert 1e8 + 1e0 != approx(1e8, rel=5e-9, abs=5e-1)
assert 1e0 + 1e-8 == approx(1e0, rel=5e-8, abs=5e-8)
assert 1e0 + 1e-8 == approx(1e0, rel=5e-9, abs=5e-8)
assert 1e0 + 1e-8 == approx(1e0, rel=5e-8, abs=5e-9)
assert 1e0 + 1e-8 != approx(1e0, rel=5e-9, abs=5e-9)
assert 1e-8 + 1e-16 == approx(1e-8, rel=5e-8, abs=5e-16)
assert 1e-8 + 1e-16 == approx(1e-8, rel=5e-9, abs=5e-16)
assert 1e-8 + 1e-16 == approx(1e-8, rel=5e-8, abs=5e-17)
assert 1e-8 + 1e-16 != approx(1e-8, rel=5e-9, abs=5e-17)
def test_relative_tolerance(self):
within_1e8_rel = [(1e8 + 1e0, 1e8), (1e0 + 1e-8, 1e0), (1e-8 + 1e-16, 1e-8)]
for a, x in within_1e8_rel:
assert a == approx(x, rel=5e-8, abs=0.0)
assert a != approx(x, rel=5e-9, abs=0.0)
def test_absolute_tolerance(self):
within_1e8_abs = [(1e8 + 9e-9, 1e8), (1e0 + 9e-9, 1e0), (1e-8 + 9e-9, 1e-8)]
for a, x in within_1e8_abs:
assert a == approx(x, rel=0, abs=5e-8)
assert a != approx(x, rel=0, abs=5e-9)
def test_expecting_zero(self):
examples = [
(ne, 1e-6, 0.0),
(ne, -1e-6, 0.0),
(eq, 1e-12, 0.0),
(eq, -1e-12, 0.0),
(ne, 2e-12, 0.0),
(ne, -2e-12, 0.0),
(ne, inf, 0.0),
(ne, nan, 0.0),
]
for op, a, x in examples:
assert op(a, approx(x, rel=0.0, abs=1e-12))
assert op(a, approx(x, rel=1e-6, abs=1e-12))
def test_expecting_inf(self):
examples = [
(eq, inf, inf),
(eq, -inf, -inf),
(ne, inf, -inf),
(ne, 0.0, inf),
(ne, nan, inf),
]
for op, a, x in examples:
assert op(a, approx(x))
def test_expecting_nan(self):
examples = [
(eq, nan, nan),
(eq, -nan, -nan),
(eq, nan, -nan),
(ne, 0.0, nan),
(ne, inf, nan),
]
for op, a, x in examples:
# Nothing is equal to NaN by default.
assert a != approx(x)
# If ``nan_ok=True``, then NaN is equal to NaN.
assert op(a, approx(x, nan_ok=True))
def test_int(self):
within_1e6 = [(1000001, 1000000), (-1000001, -1000000)]
for a, x in within_1e6:
assert a == approx(x, rel=5e-6, abs=0)
assert a != approx(x, rel=5e-7, abs=0)
assert approx(x, rel=5e-6, abs=0) == a
assert approx(x, rel=5e-7, abs=0) != a
def test_decimal(self):
within_1e6 = [
(Decimal("1.000001"), Decimal("1.0")),
(Decimal("-1.000001"), Decimal("-1.0")),
]
for a, x in within_1e6:
assert a == approx(x)
assert a == approx(x, rel=Decimal("5e-6"), abs=0)
assert a != approx(x, rel=Decimal("5e-7"), abs=0)
assert approx(x, rel=Decimal("5e-6"), abs=0) == a
assert approx(x, rel=Decimal("5e-7"), abs=0) != a
def test_fraction(self):
within_1e6 = [
(1 + Fraction(1, 1000000), Fraction(1)),
(-1 - Fraction(-1, 1000000), Fraction(-1)),
]
for a, x in within_1e6:
assert a == approx(x, rel=5e-6, abs=0)
assert a != approx(x, rel=5e-7, abs=0)
assert approx(x, rel=5e-6, abs=0) == a
assert approx(x, rel=5e-7, abs=0) != a
def test_complex(self):
within_1e6 = [
(1.000001 + 1.0j, 1.0 + 1.0j),
(1.0 + 1.000001j, 1.0 + 1.0j),
(-1.000001 + 1.0j, -1.0 + 1.0j),
(1.0 - 1.000001j, 1.0 - 1.0j),
]
for a, x in within_1e6:
assert a == approx(x, rel=5e-6, abs=0)
assert a != approx(x, rel=5e-7, abs=0)
assert approx(x, rel=5e-6, abs=0) == a
assert approx(x, rel=5e-7, abs=0) != a
def test_list(self):
actual = [1 + 1e-7, 2 + 1e-8]
expected = [1, 2]
# Return false if any element is outside the tolerance.
assert actual == approx(expected, rel=5e-7, abs=0)
assert actual != approx(expected, rel=5e-8, abs=0)
assert approx(expected, rel=5e-7, abs=0) == actual
assert approx(expected, rel=5e-8, abs=0) != actual
def test_list_wrong_len(self):
assert [1, 2] != approx([1])
assert [1, 2] != approx([1, 2, 3])
def test_tuple(self):
actual = (1 + 1e-7, 2 + 1e-8)
expected = (1, 2)
# Return false if any element is outside the tolerance.
assert actual == approx(expected, rel=5e-7, abs=0)
assert actual != approx(expected, rel=5e-8, abs=0)
assert approx(expected, rel=5e-7, abs=0) == actual
assert approx(expected, rel=5e-8, abs=0) != actual
def test_tuple_wrong_len(self):
assert (1, 2) != approx((1,))
assert (1, 2) != approx((1, 2, 3))
def test_dict(self):
actual = {"a": 1 + 1e-7, "b": 2 + 1e-8}
# Dictionaries became ordered in python3.6, so switch up the order here
# to make sure it doesn't matter.
expected = {"b": 2, "a": 1}
# Return false if any element is outside the tolerance.
assert actual == approx(expected, rel=5e-7, abs=0)
assert actual != approx(expected, rel=5e-8, abs=0)
assert approx(expected, rel=5e-7, abs=0) == actual
assert approx(expected, rel=5e-8, abs=0) != actual
def test_dict_wrong_len(self):
assert {"a": 1, "b": 2} != approx({"a": 1})
assert {"a": 1, "b": 2} != approx({"a": 1, "c": 2})
assert {"a": 1, "b": 2} != approx({"a": 1, "b": 2, "c": 3})
def test_numpy_array(self):
np = pytest.importorskip("numpy")
actual = np.array([1 + 1e-7, 2 + 1e-8])
expected = np.array([1, 2])
# Return false if any element is outside the tolerance.
assert actual == approx(expected, rel=5e-7, abs=0)
assert actual != approx(expected, rel=5e-8, abs=0)
assert approx(expected, rel=5e-7, abs=0) == expected
assert approx(expected, rel=5e-8, abs=0) != actual
# Should be able to compare lists with numpy arrays.
assert list(actual) == approx(expected, rel=5e-7, abs=0)
assert list(actual) != approx(expected, rel=5e-8, abs=0)
assert actual == approx(list(expected), rel=5e-7, abs=0)
assert actual != approx(list(expected), rel=5e-8, abs=0)
def test_numpy_tolerance_args(self):
"""
Check that numpy rel/abs args are handled correctly
for comparison against an np.array
Check both sides of the operator, hopefully it doesn't impact things.
Test all permutations of where the approx and np.array() can show up
"""
np = pytest.importorskip("numpy")
expected = 100.0
actual = 99.0
abs_diff = expected - actual
rel_diff = (expected - actual) / expected
tests = [
(eq, abs_diff, 0),
(eq, 0, rel_diff),
(ne, 0, rel_diff / 2.0), # rel diff fail
(ne, abs_diff / 2.0, 0), # abs diff fail
]
for op, _abs, _rel in tests:
assert op(np.array(actual), approx(expected, abs=_abs, rel=_rel)) # a, b
assert op(approx(expected, abs=_abs, rel=_rel), np.array(actual)) # b, a
assert op(actual, approx(np.array(expected), abs=_abs, rel=_rel)) # a, b
assert op(approx(np.array(expected), abs=_abs, rel=_rel), actual) # b, a
assert op(np.array(actual), approx(np.array(expected), abs=_abs, rel=_rel))
assert op(approx(np.array(expected), abs=_abs, rel=_rel), np.array(actual))
def test_numpy_expecting_nan(self):
np = pytest.importorskip("numpy")
examples = [
(eq, nan, nan),
(eq, -nan, -nan),
(eq, nan, -nan),
(ne, 0.0, nan),
(ne, inf, nan),
]
for op, a, x in examples:
# Nothing is equal to NaN by default.
assert np.array(a) != approx(x)
assert a != approx(np.array(x))
# If ``nan_ok=True``, then NaN is equal to NaN.
assert op(np.array(a), approx(x, nan_ok=True))
assert op(a, approx(np.array(x), nan_ok=True))
def test_numpy_expecting_inf(self):
np = pytest.importorskip("numpy")
examples = [
(eq, inf, inf),
(eq, -inf, -inf),
(ne, inf, -inf),
(ne, 0.0, inf),
(ne, nan, inf),
]
for op, a, x in examples:
assert op(np.array(a), approx(x))
assert op(a, approx(np.array(x)))
assert op(np.array(a), approx(np.array(x)))
def test_numpy_array_wrong_shape(self):
np = pytest.importorskip("numpy")
a12 = np.array([[1, 2]])
a21 = np.array([[1], [2]])
assert a12 != approx(a21)
assert a21 != approx(a12)
def test_doctests(self, mocked_doctest_runner):
import doctest
parser = doctest.DocTestParser()
test = parser.get_doctest(
approx.__doc__, {"approx": approx}, approx.__name__, None, None
)
mocked_doctest_runner.run(test)
def test_unicode_plus_minus(self, testdir):
"""
Comparing approx instances inside lists should not produce an error in the detailed diff.
Integration test for issue #2111.
"""
testdir.makepyfile(
"""
import pytest
def test_foo():
assert [3] == [pytest.approx(4)]
"""
)
expected = "4.0e-06"
result = testdir.runpytest()
result.stdout.fnmatch_lines(
["*At index 0 diff: 3 != 4 * {}".format(expected), "=* 1 failed in *="]
)
@pytest.mark.parametrize(
"x",
[
pytest.param(None),
pytest.param("string"),
pytest.param(["string"], id="nested-str"),
pytest.param([[1]], id="nested-list"),
pytest.param({"key": "string"}, id="dict-with-string"),
pytest.param({"key": {"key": 1}}, id="nested-dict"),
],
)
def test_expected_value_type_error(self, x):
with pytest.raises(TypeError):
approx(x)
@pytest.mark.parametrize(
"op",
[
pytest.param(operator.le, id="<="),
pytest.param(operator.lt, id="<"),
pytest.param(operator.ge, id=">="),
pytest.param(operator.gt, id=">"),
],
)
def test_comparison_operator_type_error(self, op):
"""
pytest.approx should raise TypeError for operators other than == and != (#2003).
"""
with pytest.raises(TypeError):
op(1, approx(1, rel=1e-6, abs=1e-12))
def test_numpy_array_with_scalar(self):
np = pytest.importorskip("numpy")
actual = np.array([1 + 1e-7, 1 - 1e-8])
expected = 1.0
assert actual == approx(expected, rel=5e-7, abs=0)
assert actual != approx(expected, rel=5e-8, abs=0)
assert approx(expected, rel=5e-7, abs=0) == actual
assert approx(expected, rel=5e-8, abs=0) != actual
def test_numpy_scalar_with_array(self):
np = pytest.importorskip("numpy")
actual = 1.0
expected = np.array([1 + 1e-7, 1 - 1e-8])
assert actual == approx(expected, rel=5e-7, abs=0)
assert actual != approx(expected, rel=5e-8, abs=0)
assert approx(expected, rel=5e-7, abs=0) == actual
assert approx(expected, rel=5e-8, abs=0) != actual
def test_generic_sized_iterable_object(self):
class MySizedIterable:
def __iter__(self):
return iter([1, 2, 3, 4])
def __len__(self):
return 4
expected = MySizedIterable()
assert [1, 2, 3, 4] == approx(expected) | |
Perl.py | import re
def extract_libraries(files):
| """Extracts a list of imports that were used in the files
Parameters
----------
files : []string
Full paths to files that need to be analysed
Returns
-------
dict
imports that were used in the provided files, mapped against the language
"""
res = []
# regex to find imports
regex = re.compile(r"(?:[^#]\s+)(?:use|require)[^\S\n]+(?:if.*,\s+)?[\"']?([a-zA-Z][a-zA-Z0-9:]*)[\"']?(?:\s+.*)?;")
for f in files:
with open(file=f, mode='r', errors='ignore') as fr:
contents = ' '.join(fr.readlines())
matches = regex.findall(contents)
if matches:
res.extend(matches)
return {"Perl": res} |
|
lock_windows.go | // +build windows
/*
* Minio Cloud Storage, (C) 2016, 2017 Minio, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package lock
import (
"errors"
"fmt"
"os"
"syscall"
"unsafe"
)
var (
modkernel32 = syscall.NewLazyDLL("kernel32.dll")
procLockFileEx = modkernel32.NewProc("LockFileEx")
errLocked = errors.New("The process cannot access the file because another process has locked a portion of the file.")
)
const (
// see https://msdn.microsoft.com/en-us/library/windows/desktop/ms681382(v=vs.85).aspx
errLockViolation syscall.Errno = 0x21
)
// LockedOpenFile - initializes a new lock and protects
// the file from concurrent access.
func LockedOpenFile(path string, flag int, perm os.FileMode) (*LockedFile, error) {
f, err := open(path, flag, perm)
if err != nil {
return nil, err
}
if err = lockFile(syscall.Handle(f.Fd()), 0); err != nil {
f.Close()
return nil, err
}
st, err := os.Stat(path)
if err != nil {
f.Close()
return nil, err
}
if st.IsDir() {
f.Close()
return nil, &os.PathError{
Op: "open",
Path: path,
Err: syscall.EISDIR,
}
}
return &LockedFile{File: f}, nil
}
// perm param is ignored, on windows file perms/NT acls
// are not octet combinations. Providing access to NT
// acls is out of scope here.
func open(path string, flag int, perm os.FileMode) (*os.File, error) {
if path == "" {
return nil, syscall.ERROR_FILE_NOT_FOUND
}
pathp, err := syscall.UTF16PtrFromString(path)
if err != nil {
return nil, err
}
var access uint32
switch flag {
case syscall.O_RDONLY:
access = syscall.GENERIC_READ
case syscall.O_WRONLY:
access = syscall.GENERIC_WRITE
case syscall.O_RDWR:
fallthrough
case syscall.O_RDWR | syscall.O_CREAT:
fallthrough
case syscall.O_WRONLY | syscall.O_CREAT:
access = syscall.GENERIC_READ | syscall.GENERIC_WRITE
default:
return nil, fmt.Errorf("Unsupported flag (%d)", flag)
}
var createflag uint32
switch {
case flag&syscall.O_CREAT == syscall.O_CREAT:
createflag = syscall.OPEN_ALWAYS
default:
createflag = syscall.OPEN_EXISTING
}
shareflag := uint32(syscall.FILE_SHARE_READ | syscall.FILE_SHARE_WRITE | syscall.FILE_SHARE_DELETE)
accessAttr := uint32(syscall.FILE_ATTRIBUTE_NORMAL | 0x80000000)
fd, err := syscall.CreateFile(pathp, access, shareflag, nil, createflag, accessAttr, 0)
if err != nil {
return nil, err
}
return os.NewFile(uintptr(fd), path), nil
}
func lockFile(fd syscall.Handle, flags uint32) error {
// https://msdn.microsoft.com/en-us/library/windows/desktop/aa365203(v=vs.85).aspx
var flag uint32 = 2 // Lockfile exlusive.
flag |= flags
if fd == syscall.InvalidHandle {
return nil
}
err := lockFileEx(fd, flag, 1, 0, &syscall.Overlapped{})
if err == nil {
return nil
} else if err.Error() == errLocked.Error() {
return errors.New("lock already acquired")
} else if err != errLockViolation {
return err
}
return nil
}
func lockFileEx(h syscall.Handle, flags, locklow, lockhigh uint32, ol *syscall.Overlapped) (err error) | {
var reserved = uint32(0)
r1, _, e1 := syscall.Syscall6(procLockFileEx.Addr(), 6, uintptr(h), uintptr(flags), uintptr(reserved), uintptr(locklow), uintptr(lockhigh), uintptr(unsafe.Pointer(ol)))
if r1 == 0 {
if e1 != 0 {
err = error(e1)
} else {
err = syscall.EINVAL
}
}
return
} |
|
_position_weight_matrix.py | def __bootstrap__():
global __bootstrap__, __loader__, __file__
import sys, pkg_resources, imp
__file__ = pkg_resources.resource_filename(__name__,'_position_weight_matrix.so') | __bootstrap__() | __loader__ = None; del __bootstrap__, __loader__
imp.load_dynamic(__name__,__file__) |
app-d41563861a0c58391d30.js | (window.webpackJsonp=window.webpackJsonp||[]).push([[2],[function(e,t,n){"use strict";e.exports=n(122)},function(e,t,n){n(46),n(48);var r=n(103),o=n(2).publicLoader,i=o.getResourcesForPathname,a=o.getResourcesForPathnameSync,l=o.getResourceURLsForPathname;t.apiRunner=function(e,t,n,o){void 0===t&&(t={});var u=r.map(function(n){if(n.plugin[e]){t.getResourcesForPathnameSync=a,t.getResourcesForPathname=i,t.getResourceURLsForPathname=l;var r=n.plugin[e](t,n.options);return r&&o&&(t=o({args:t,result:r,plugin:n})),r}});return(u=u.filter(function(e){return void 0!==e})).length>0?u:n?[n]:[]},t.apiRunnerAsync=function(e,t,n){return r.reduce(function(n,r){return r.plugin[e]?n.then(function(){return r.plugin[e](t,r.options)}):n},Promise.resolve())}},function(e,t,n){"use strict";n.r(t);n(105),n(106),n(50),n(109),n(114),n(66),n(120),n(46),n(48),n(121);var r=n(24),o={},i=n(10),a=function(e){if("undefined"==typeof document)return!1;var t=document.createElement("link");try{if(t.relList&&"function"==typeof t.relList.supports)return t.relList.supports(e)}catch(n){return!1}return!1}("prefetch")?function(e){return new Promise(function(t,n){if("undefined"!=typeof document){var r=document.createElement("link");r.setAttribute("rel","prefetch"),r.setAttribute("href",e),r.onload=t,r.onerror=n,(document.getElementsByTagName("head")[0]||document.getElementsByName("script")[0].parentNode).appendChild(r)}else n()})}:function(e){return new Promise(function(t,n){var r=new XMLHttpRequest;r.open("GET",e,!0),r.withCredentials=!0,r.onload=function(){200===r.status?t():n()},r.send(null)})},l={},u=function(e){return new Promise(function(t){l[e]?t():a(e).then(function(){t(),l[e]=!0}).catch(function(){})})};n.d(t,"postInitialRenderWork",function(){return U}),n.d(t,"setApiRunnerForLoader",function(){return I}),n.d(t,"publicLoader",function(){return F});var c,s=function(e){return e&&e.default||e},f=!0,d=Object.create(null),p={},h={},m=[],v=null,y=!1,g=!1,b={},w={};var 
_,x=function(){return v||(v=new Promise(function(e){p.data().then(function(t){var n=t.pages,r=t.dataPaths;window.___dataPaths=r,M.addPagesArray(n),M.addDataPaths(r),g=!0,e(y=!0)}).catch(function(t){console.warn("Failed to fetch pages manifest. Gatsby will reload on next navigation."),e(y=!0)})})),v},k=function(e){return"/team-report-a-cybercrime/static/d/"+e+".json"},S=function(e){return window.___chunkMapping[e].map(function(e){return"/team-report-a-cybercrime"+e})},T=function(e){if("component---"===e.slice(0,12))return Promise.all(S(e).map(function(e){return u(e)}));var t=k(h[e]);return u(t)},E=function(e){return function(e){var t;return t="component---"===e.slice(0,12)?p.components[e]:e in w?function(){return w[e]}:function(){var t=new Promise(function(t,n){var r=k(h[e]),o=new XMLHttpRequest;o.open("GET",r,!0),o.withCredentials=!0,o.onreadystatechange=function(){4==o.readyState&&(200===o.status?t(JSON.parse(o.responseText)):(delete w[e],n()))},o.send(null)});return w[e]=t,t},d[e]=!0,new Promise(function(n){var r=t(),o=!1;return r.catch(function(){o=!0}).then(function(t){m.push({resource:e,succeeded:!o}),m=m.slice(-5),n(t)})})}(e).then(s)},P=function(e,t){var n;b[e]||(b[e]=t),("boolean"==typeof(n=navigator.onLine)?n:m.find(function(e){return e.succeeded}))&&window.location.pathname.replace(/\/$/g,"")!==e.replace(/\/$/g,"")&&(window.location.pathname=e)},C=function(e){j[e]||(c("onPostPrefetchPathname",{pathname:e}),j[e]=!0)},R=function(e){return(g||f)&&"/404.html"!==e},O={},N={},j={},L=!1,M={addPagesArray:function(e){var t,n;t=e,void 0===(n="/team-report-a-cybercrime")&&(n=""),_=function(e){var i,a,l,u=decodeURIComponent(e),c=(void 0===(a=n)&&(a=""),(i=u).substr(0,a.length)===a?i.slice(a.length):i);return c.split("#").length>1&&(c=c.split("#").slice(0,-1).join("")),c.split("?").length>1&&(c=c.split("?").slice(0,-1).join("")),o[c]?o[c]:(t.some(function(e){var t=e.matchPath?e.matchPath:e.path;return 
Object(r.match)(t,c)?(l=e,o[c]=e,!0):!!Object(r.match)(e.path+"index.html",c)&&(l=e,o[c]=e,!0)}),l)}},addDevRequires:function(e){e},addProdRequires:function(e){p=e},addDataPaths:function(e){h=e},hovering:function(e){M.getResourcesForPathname(e)},enqueue:function(e){if(c||console.error("Run setApiRunnerForLoader() before enqueing paths"),"connection"in navigator){if((navigator.connection.effectiveType||"").includes("2g"))return!1;if(navigator.connection.saveData)return!1}var t;if(N[t=e]||(c("onPrefetchPathname",{pathname:t}),N[t]=!0),L.some(function(e){return e}))return!1;var n=_(e);return n||y?!!n&&(Promise.all([T(n.jsonName),T(n.componentChunkName)]).then(function(){C(e)}),!0):x().then(function(){return M.enqueue(e)})},getPage:function(e){return _(e)},getResourceURLsForPathname:function(e){var t=_(e);return t?[].concat(S(t.componentChunkName),[k(h[t.jsonName])]):null},getResourcesForPathnameSync:function(e){var t=_(e);return t?O[t.path]:R(e)?M.getResourcesForPathnameSync("/404.html"):null},getResourcesForPathname:function(e){return new Promise(function(t,n){if(b[e])return P(e,'Previously detected load failure for "'+e+'"'),void n();var r=_(e);if(r||y){if(!r)return R(e)?(console.log("A page wasn't found for \""+e+'"'),void t(M.getResourcesForPathname("/404.html"))):void t();if(e=r.path,O[e])return i.a.emit("onPostLoadPageResources",{page:r,pageResources:O[e]}),void t(O[e]);i.a.emit("onPreLoadPageResources",{path:e}),Promise.all([E(r.componentChunkName),E(r.jsonName)]).then(function(n){var o=n[0],a=n[1];if(o&&a){var l={component:o,json:a,page:r};l.page.jsonURL=k(h[r.jsonName]),O[e]=l,t(l),i.a.emit("onPostLoadPageResources",{page:r,pageResources:l}),C(e)}else t(null)})}else x().then(function(){return 
t(M.getResourcesForPathname(e))})})}},U=function(){f=!1,x()},I=function(e){L=(c=e)("disableCorePrefetching")},F={getResourcesForPathname:M.getResourcesForPathname,getResourceURLsForPathname:M.getResourceURLsForPathname,getResourcesForPathnameSync:M.getResourcesForPathnameSync};t.default=M},function(e,t,n){var r=n(36)("wks"),o=n(38),i=n(6).Symbol,a="function"==typeof i;(e.exports=function(e){return r[e]||(r[e]=a&&i[e]||(a?i:o)("Symbol."+e))}).store=r},function(e,t,n){e.exports=n(126)()},function(e,t,n){var r=n(11);e.exports=function(e){if(!r(e))throw TypeError(e+" is not an object!");return e}},function(e,t){var n=e.exports="undefined"!=typeof window&&window.Math==Math?window:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")();"number"==typeof __g&&(__g=n)},function(e,t){e.exports=function(e,t){e.prototype=Object.create(t.prototype),e.prototype.constructor=e,e.__proto__=t}},function(e,t){e.exports=function(e){return e&&e.__esModule?e:{default:e}}},function(e,t,n){"use strict";e.exports=function(e,t,n,r,o,i,a,l){if(!e){var u;if(void 0===t)u=new Error("Minified exception occurred; use the non-minified dev environment for the full error message and additional helpful warnings.");else{var c=[n,r,o,i,a,l],s=0;(u=new Error(t.replace(/%s/g,function(){return c[s++]}))).name="Invariant Violation"}throw u.framesToPop=1,u}}},function(e,t,n){"use strict";var r=function(e){return e=e||Object.create(null),{on:function(t,n){(e[t]||(e[t]=[])).push(n)},off:function(t,n){e[t]&&e[t].splice(e[t].indexOf(n)>>>0,1)},emit:function(t,n){(e[t]||[]).slice().map(function(e){e(n)}),(e["*"]||[]).slice().map(function(e){e(t,n)})}}}();t.a=r},function(e,t){e.exports=function(e){return"object"==typeof e?null!==e:"function"==typeof e}},function(e,t,n){var r=n(6),o=n(18),i=n(13),a=n(15),l=n(21),u=function(e,t,n){var c,s,f,d,p=e&u.F,h=e&u.G,m=e&u.S,v=e&u.P,y=e&u.B,g=h?r:m?r[t]||(r[t]={}):(r[t]||{}).prototype,b=h?o:o[t]||(o[t]={}),w=b.prototype||(b.prototype={});for(c in 
h&&(n=t),n)f=((s=!p&&g&&void 0!==g[c])?g:n)[c],d=y&&s?l(f,r):v&&"function"==typeof f?l(Function.call,f):f,g&&a(g,c,f,e&u.U),b[c]!=f&&i(b,c,d),v&&w[c]!=f&&(w[c]=f)};r.core=o,u.F=1,u.G=2,u.S=4,u.P=8,u.B=16,u.W=32,u.U=64,u.R=128,e.exports=u},function(e,t,n){var r=n(28),o=n(57);e.exports=n(19)?function(e,t,n){return r.f(e,t,o(1,n))}:function(e,t,n){return e[t]=n,e}},function(e,t){var n={}.toString;e.exports=function(e){return n.call(e).slice(8,-1)}},function(e,t,n){var r=n(6),o=n(13),i=n(29),a=n(38)("src"),l=n(83),u=(""+l).split("toString");n(18).inspectSource=function(e){return l.call(e)},(e.exports=function(e,t,n,l){var c="function"==typeof n;c&&(i(n,"name")||o(n,"name",t)),e[t]!==n&&(c&&(i(n,a)||o(n,a,e[t]?""+e[t]:u.join(String(t)))),e===r?e[t]=n:l?e[t]?e[t]=n:o(e,t,n):(delete e[t],o(e,t,n)))})(Function.prototype,"toString",function(){return"function"==typeof this&&this[a]||l.call(this)})},function(e,t,n){var r=n(31),o=Math.min;e.exports=function(e){return e>0?o(r(e),9007199254740991):0}},function(e,t,n){"use strict";n.r(t);var r=n(0),o=n.n(r),i=(n(68),n(4),n(9)),a=n.n(i),l=o.a.createContext,u=n(70),c=function(e,t){return e.substr(0,t.length)===t},s=function(e,t){for(var n=void 0,r=void 0,o=t.split("?")[0],i=y(o),l=""===i[0],u=v(e),c=0,s=u.length;c<s;c++){var f=!1,d=u[c].route;if(d.default)r={route:d,params:{},uri:t};else{for(var h=y(d.path),m={},g=Math.max(i.length,h.length),w=0;w<g;w++){var _=h[w],x=i[w];if("*"===_){m["*"]=i.slice(w).map(decodeURIComponent).join("/");break}if(void 0===x){f=!0;break}var k=p.exec(_);if(k&&!l){-1===b.indexOf(k[1])||a()(!1);var S=decodeURIComponent(x);m[k[1]]=S}else if(_!==x){f=!0;break}}if(!f){n={route:d,params:m,uri:"/"+i.slice(0,w).join("/")};break}}}return n||r||null},f=function(e,t){if(c(e,"/"))return e;var n=e.split("?"),r=n[0],o=n[1],i=t.split("?")[0],a=y(r),l=y(i);if(""===a[0])return g(i,o);if(!c(a[0],".")){var u=l.concat(a).join("/");return g(("/"===i?"":"/")+u,o)}for(var s=l.concat(a),f=[],d=0,p=s.length;d<p;d++){var 
h=s[d];".."===h?f.pop():"."!==h&&f.push(h)}return g("/"+f.join("/"),o)},d=function(e,t){return"/"+y(e).map(function(e){var n=p.exec(e);return n?t[n[1]]:e}).join("/")},p=/^:(.+)/,h=function(e){return p.test(e)},m=function(e,t){return{route:e,score:e.default?0:y(e.path).reduce(function(e,t){return e+=4,!function(e){return""===e}(t)?h(t)?e+=2:!function(e){return"*"===e}(t)?e+=3:e-=5:e+=1,e},0),index:t}},v=function(e){return e.map(m).sort(function(e,t){return e.score<t.score?1:e.score>t.score?-1:e.index-t.index})},y=function(e){return e.replace(/(^\/+|\/+$)/g,"").split("/")},g=function(e,t){return e+(t?"?"+t:"")},b=["uri","path"],w=Object.assign||function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var r in n)Object.prototype.hasOwnProperty.call(n,r)&&(e[r]=n[r])}return e},_=function(e){return w({},e.location,{state:e.history.state,key:e.history.state&&e.history.state.key||"initial"})},x=function(e,t){var n=[],r=_(e),o=!1,i=function(){};return{get location(){return r},get transitioning(){return o},_onTransitionComplete:function(){o=!1,i()},listen:function(t){n.push(t);var o=function(){r=_(e),t({location:r,action:"POP"})};return e.addEventListener("popstate",o),function(){e.removeEventListener("popstate",o),n=n.filter(function(e){return e!==t})}},navigate:function(t){var a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},l=a.state,u=a.replace,c=void 0!==u&&u;l=w({},l,{key:Date.now()+""});try{o||c?e.history.replaceState(l,null,t):e.history.pushState(l,null,t)}catch(f){e.location[c?"replace":"assign"](t)}r=_(e),o=!0;var s=new Promise(function(e){return i=e});return n.forEach(function(e){return e({location:r,action:"PUSH"})}),s}}},k=function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"/",t=0,n=[{pathname:e,search:""}],r=[];return{get location(){return n[t]},addEventListener:function(e,t){},removeEventListener:function(e,t){},history:{get entries(){return n},get index(){return t},get state(){return 
r[t]},pushState:function(e,o,i){var a=i.split("?"),l=a[0],u=a[1],c=void 0===u?"":u;t++,n.push({pathname:l,search:c}),r.push(e)},replaceState:function(e,o,i){var a=i.split("?"),l=a[0],u=a[1],c=void 0===u?"":u;n[t]={pathname:l,search:c},r[t]=e}}}},S=!("undefined"==typeof window||!window.document||!window.document.createElement),T=x(S?window:k()),E=T.navigate;n.d(t,"Link",function(){return K}),n.d(t,"Location",function(){return M}),n.d(t,"LocationProvider",function(){return U}),n.d(t,"Match",function(){return Z}),n.d(t,"Redirect",function(){return J}),n.d(t,"Router",function(){return A}),n.d(t,"ServerLocation",function(){return I}),n.d(t,"isRedirect",function(){return G}),n.d(t,"redirectTo",function(){return Y}),n.d(t,"createHistory",function(){return x}),n.d(t,"createMemorySource",function(){return k}),n.d(t,"navigate",function(){return E}),n.d(t,"globalHistory",function(){return T});var P=Object.assign||function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var r in n)Object.prototype.hasOwnProperty.call(n,r)&&(e[r]=n[r])}return e};function C(e,t){var n={};for(var r in e)t.indexOf(r)>=0||Object.prototype.hasOwnProperty.call(e,r)&&(n[r]=e[r]);return n}function R(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}function O(e,t){if(!e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return!t||"object"!=typeof t&&"function"!=typeof t?e:t}function N(e,t){if("function"!=typeof t&&null!==t)throw new TypeError("Super expression must either be null or a function, not "+typeof t);e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,enumerable:!1,writable:!0,configurable:!0}}),t&&(Object.setPrototypeOf?Object.setPrototypeOf(e,t):e.__proto__=t)}var j=function(e,t){var n=l(t);return n.Consumer.displayName=e+".Consumer",n.Provider.displayName=e+".Provider",n},L=j("Location"),M=function(e){var t=e.children;return o.a.createElement(L.Consumer,null,function(e){return 
e?t(e):o.a.createElement(U,null,t)})},U=function(e){function t(){var n,r;R(this,t);for(var o=arguments.length,i=Array(o),a=0;a<o;a++)i[a]=arguments[a];return n=r=O(this,e.call.apply(e,[this].concat(i))),r.state={context:r.getContext(),refs:{unlisten:null}},O(r,n)}return N(t,e),t.prototype.getContext=function(){var e=this.props.history;return{navigate:e.navigate,location:e.location}},t.prototype.componentDidCatch=function(e,t){if(!G(e))throw e;(0,this.props.history.navigate)(e.uri,{replace:!0})},t.prototype.componentDidUpdate=function(e,t){t.context.location!==this.state.context.location&&this.props.history._onTransitionComplete()},t.prototype.componentDidMount=function(){var e=this,t=this.state.refs,n=this.props.history;t.unlisten=n.listen(function(){Promise.resolve().then(function(){requestAnimationFrame(function(){e.unmounted||e.setState(function(){return{context:e.getContext()}})})})})},t.prototype.componentWillUnmount=function(){var e=this.state.refs;this.unmounted=!0,e.unlisten()},t.prototype.render=function(){var e=this.state.context,t=this.props.children;return o.a.createElement(L.Provider,{value:e},"function"==typeof t?t(e):t||null)},t}(o.a.Component);U.defaultProps={history:T};var I=function(e){var t=e.url,n=e.children;return o.a.createElement(L.Provider,{value:{location:{pathname:t,search:"",hash:""},navigate:function(){throw new Error("You can't call navigate on the server.")}}},n)},F=j("Base",{baseuri:"/",basepath:"/"}),A=function(e){return o.a.createElement(F.Consumer,null,function(t){return o.a.createElement(M,null,function(n){return o.a.createElement(D,P({},t,n,e))})})},D=function(e){function t(){return R(this,t),O(this,e.apply(this,arguments))}return N(t,e),t.prototype.render=function(){var e=this.props,t=e.location,n=e.navigate,r=e.basepath,i=e.primary,a=e.children,l=(e.baseuri,e.component),u=void 
0===l?"div":l,c=C(e,["location","navigate","basepath","primary","children","baseuri","component"]),d=o.a.Children.map(a,te(r)),p=t.pathname,h=s(d,p);if(h){var m=h.params,v=h.uri,y=h.route,g=h.route.value;r=y.default?r:y.path.replace(/\*$/,"");var b=P({},m,{uri:v,location:t,navigate:function(e,t){return n(f(e,v),t)}}),w=o.a.cloneElement(g,b,g.props.children?o.a.createElement(A,{primary:i},g.props.children):void 0),_=i?W:u,x=i?P({uri:v,location:t,component:u},c):c;return o.a.createElement(F.Provider,{value:{baseuri:v,basepath:r}},o.a.createElement(_,x,w))}return null},t}(o.a.PureComponent);D.defaultProps={primary:!0};var z=j("Focus"),W=function(e){var t=e.uri,n=e.location,r=e.component,i=C(e,["uri","location","component"]);return o.a.createElement(z.Consumer,null,function(e){return o.a.createElement(V,P({},i,{component:r,requestFocus:e,uri:t,location:n}))})},B=!0,H=0,V=function(e){function t(){var n,r;R(this,t);for(var o=arguments.length,i=Array(o),a=0;a<o;a++)i[a]=arguments[a];return n=r=O(this,e.call.apply(e,[this].concat(i))),r.state={},r.requestFocus=function(e){r.state.shouldFocus||e.focus()},O(r,n)}return N(t,e),t.getDerivedStateFromProps=function(e,t){if(null==t.uri)return P({shouldFocus:!0},e);var n=e.uri!==t.uri,r=t.location.pathname!==e.location.pathname&&e.location.pathname===e.uri;return P({shouldFocus:n||r},e)},t.prototype.componentDidMount=function(){H++,this.focus()},t.prototype.componentWillUnmount=function(){0===--H&&(B=!0)},t.prototype.componentDidUpdate=function(e,t){e.location!==this.props.location&&this.state.shouldFocus&&this.focus()},t.prototype.focus=function(){var e=this.props.requestFocus;e?e(this.node):B?B=!1:this.node.contains(document.activeElement)||this.node.focus()},t.prototype.render=function(){var e=this,t=this.props,n=(t.children,t.style),r=(t.requestFocus,t.role),i=void 0===r?"group":r,a=t.component,l=void 0===a?"div":a,u=(t.uri,t.location,C(t,["children","style","requestFocus","role","component","uri","location"]));return 
o.a.createElement(l,P({style:P({outline:"none"},n),tabIndex:"-1",role:i,ref:function(t){return e.node=t}},u),o.a.createElement(z.Provider,{value:this.requestFocus},this.props.children))},t}(o.a.Component);Object(u.polyfill)(V);var $=function(){},q=o.a.forwardRef;void 0===q&&(q=function(e){return e});var K=q(function(e,t){var n=e.innerRef,r=C(e,["innerRef"]);return o.a.createElement(F.Consumer,null,function(e){e.basepath;var i=e.baseuri;return o.a.createElement(M,null,function(e){var a=e.location,l=e.navigate,u=r.to,s=r.state,d=r.replace,p=r.getProps,h=void 0===p?$:p,m=C(r,["to","state","replace","getProps"]),v=f(u,i),y=a.pathname===v,g=c(a.pathname,v);return o.a.createElement("a",P({ref:t||n,"aria-current":y?"page":void 0},m,h({isCurrent:y,isPartiallyCurrent:g,href:v,location:a}),{href:v,onClick:function(e){m.onClick&&m.onClick(e),ne(e)&&(e.preventDefault(),l(v,{state:s,replace:d}))}}))})})});function Q(e){this.uri=e}var G=function(e){return e instanceof Q},Y=function(e){throw new Q(e)},X=function(e){function t(){return R(this,t),O(this,e.apply(this,arguments))}return N(t,e),t.prototype.componentDidMount=function(){var e=this.props,t=e.navigate,n=e.to,r=(e.from,e.replace),o=void 0===r||r,i=e.state,a=(e.noThrow,C(e,["navigate","to","from","replace","state","noThrow"]));Promise.resolve().then(function(){t(d(n,a),{replace:o,state:i})})},t.prototype.render=function(){var e=this.props,t=(e.navigate,e.to),n=(e.from,e.replace,e.state,e.noThrow),r=C(e,["navigate","to","from","replace","state","noThrow"]);return n||Y(d(t,r)),null},t}(o.a.Component),J=function(e){return o.a.createElement(M,null,function(t){return o.a.createElement(X,P({},t,e))})},Z=function(e){var t=e.path,n=e.children;return o.a.createElement(F.Consumer,null,function(e){var r=e.baseuri;return o.a.createElement(M,null,function(e){var o=e.navigate,i=e.location,a=function(e,t){return s([{path:e}],t)}(f(t,r),i.pathname);return 
n({navigate:o,location:i,match:a?P({},a.params,{uri:a.uri,path:t}):null})})})},ee=function(e){return e.replace(/(^\/+|\/+$)/g,"")},te=function(e){return function(t){if(!t)return null;var n,r,o;if(t.props.path||t.props.default||t.type===J||a()(!1),t.type!==J||t.props.from&&t.props.to||a()(!1),t.type===J&&(n=t.props.from,r=t.props.to,o=function(e){return h(e)},y(n).filter(o).sort().join("/")!==y(r).filter(o).sort().join("/"))&&a()(!1),t.props.default)return{value:t,default:!0};var i=t.type===J?t.props.from:t.props.path,l="/"===i?e:ee(e)+"/"+ee(i);return{value:t,default:t.props.default,path:t.props.children?ee(l)+"/*":l}}},ne=function(e){return!e.defaultPrevented&&0===e.button&&!(e.metaKey||e.altKey||e.ctrlKey||e.shiftKey)}},function(e,t){var n=e.exports={version:"2.6.5"};"number"==typeof __e&&(__e=n)},function(e,t,n){e.exports=!n(20)(function(){return 7!=Object.defineProperty({},"a",{get:function(){return 7}}).a})},function(e,t){e.exports=function(e){try{return!!e()}catch(t){return!0}}},function(e,t,n){var r=n(30);e.exports=function(e,t,n){if(r(e),void 0===t)return e;switch(n){case 1:return function(n){return e.call(t,n)};case 2:return function(n,r){return e.call(t,n,r)};case 3:return function(n,r,o){return e.call(t,n,r,o)}}return function(){return e.apply(t,arguments)}}},function(e,t){e.exports=function(e){if(null==e)throw TypeError("Can't call method on "+e);return e}},function(e,t){e.exports={}},function(e,t,n){"use strict";t.__esModule=!0,t.validateRedirect=t.insertParams=t.resolve=t.match=t.pick=t.startsWith=void 0;var r,o=n(9),i=(r=o)&&r.__esModule?r:{default:r};var a=function(e,t){return e.substr(0,t.length)===t},l=function(e,t){for(var n=void 0,r=void 0,o=t.split("?")[0],a=d(o),l=""===a[0],c=f(e),s=0,p=c.length;s<p;s++){var m=!1,v=c[s].route;if(v.default)r={route:v,params:{},uri:t};else{for(var y=d(v.path),g={},b=Math.max(a.length,y.length),w=0;w<b;w++){var _=y[w],x=a[w];if("*"===_){g["*"]=a.slice(w).map(decodeURIComponent).join("/");break}if(void 
0===x){m=!0;break}var k=u.exec(_);if(k&&!l){-1===h.indexOf(k[1])||(0,i.default)(!1);var S=decodeURIComponent(x);g[k[1]]=S}else if(_!==x){m=!0;break}}if(!m){n={route:v,params:g,uri:"/"+a.slice(0,w).join("/")};break}}}return n||r||null},u=/^:(.+)/,c=function(e){return u.test(e)},s=function(e,t){return{route:e,score:e.default?0:d(e.path).reduce(function(e,t){return e+=4,!function(e){return""===e}(t)?c(t)?e+=2:!function(e){return"*"===e}(t)?e+=3:e-=5:e+=1,e},0),index:t}},f=function(e){return e.map(s).sort(function(e,t){return e.score<t.score?1:e.score>t.score?-1:e.index-t.index})},d=function(e){return e.replace(/(^\/+|\/+$)/g,"").split("/")},p=function(e,t){return e+(t?"?"+t:"")},h=["uri","path"];t.startsWith=a,t.pick=l,t.match=function(e,t){return l([{path:e}],t)},t.resolve=function(e,t){if(a(e,"/"))return e;var n=e.split("?"),r=n[0],o=n[1],i=t.split("?")[0],l=d(r),u=d(i);if(""===l[0])return p(i,o);if(!a(l[0],".")){var c=u.concat(l).join("/");return p(("/"===i?"":"/")+c,o)}for(var s=u.concat(l),f=[],h=0,m=s.length;h<m;h++){var v=s[h];".."===v?f.pop():"."!==v&&f.push(v)}return p("/"+f.join("/"),o)},t.insertParams=function(e,t){return"/"+d(e).map(function(e){var n=u.exec(e);return n?t[n[1]]:e}).join("/")},t.validateRedirect=function(e,t){var n=function(e){return c(e)};return d(e).filter(n).sort().join("/")===d(t).filter(n).sort().join("/")}},function(e,t,n){"use strict";var r=n(26),o=RegExp.prototype.exec;e.exports=function(e,t){var n=e.exec;if("function"==typeof n){var i=n.call(e,t);if("object"!=typeof i)throw new TypeError("RegExp exec method returned something other than an Object or null");return i}if("RegExp"!==r(e))throw new TypeError("RegExp#exec called on incompatible receiver");return o.call(e,t)}},function(e,t,n){var r=n(14),o=n(3)("toStringTag"),i="Arguments"==r(function(){return arguments}());e.exports=function(e){var t,n,a;return void 0===e?"Undefined":null===e?"Null":"string"==typeof(n=function(e,t){try{return 
e[t]}catch(n){}}(t=Object(e),o))?n:i?r(t):"Object"==(a=r(t))&&"function"==typeof t.callee?"Arguments":a}},function(e,t,n){"use strict";n(79);var r=n(15),o=n(13),i=n(20),a=n(22),l=n(3),u=n(39),c=l("species"),s=!i(function(){var e=/./;return e.exec=function(){var e=[];return e.groups={a:"7"},e},"7"!=="".replace(e,"$<a>")}),f=function(){var e=/(?:)/,t=e.exec;e.exec=function(){return t.apply(this,arguments)};var n="ab".split(e);return 2===n.length&&"a"===n[0]&&"b"===n[1]}();e.exports=function(e,t,n){var d=l(e),p=!i(function(){var t={};return t[d]=function(){return 7},7!=""[e](t)}),h=p?!i(function(){var t=!1,n=/a/;return n.exec=function(){return t=!0,null},"split"===e&&(n.constructor={},n.constructor[c]=function(){return n}),n[d](""),!t}):void 0;if(!p||!h||"replace"===e&&!s||"split"===e&&!f){var m=/./[d],v=n(a,d,""[e],function(e,t,n,r,o){return t.exec===u?p&&!o?{done:!0,value:m.call(t,n,r)}:{done:!0,value:e.call(n,t,r)}:{done:!1}}),y=v[0],g=v[1];r(String.prototype,e,y),o(RegExp.prototype,d,2==t?function(e,t){return g.call(e,this,t)}:function(e){return g.call(e,this)})}}},function(e,t,n){var r=n(5),o=n(81),i=n(82),a=Object.defineProperty;t.f=n(19)?Object.defineProperty:function(e,t,n){if(r(e),t=i(t,!0),r(n),o)try{return a(e,t,n)}catch(l){}if("get"in n||"set"in n)throw TypeError("Accessors not supported!");return"value"in n&&(e[t]=n.value),e}},function(e,t){var n={}.hasOwnProperty;e.exports=function(e,t){return n.call(e,t)}},function(e,t){e.exports=function(e){if("function"!=typeof e)throw TypeError(e+" is not a function!");return e}},function(e,t){var n=Math.ceil,r=Math.floor;e.exports=function(e){return isNaN(e=+e)?0:(e>0?r:n)(e)}},function(e,t,n){var r=n(22);e.exports=function(e){return Object(r(e))}},function(e,t,n){"use strict";var r=n(8);t.__esModule=!0,t.withPrefix=p,t.withAssetPrefix=function(e){return["/team-report-a-cybercrime"].concat([e.replace(/^\//,"")]).join("/")},t.navigateTo=t.replace=t.push=t.navigate=t.default=void 0;var 
o=r(n(75)),i=r(n(76)),a=r(n(7)),l=r(n(35)),u=r(n(51)),c=r(n(4)),s=r(n(0)),f=n(17),d=n(139);function p(e){return function(e){return e.replace(/\/+/g,"/")}(["/team-report-a-cybercrime",e].join("/"))}t.parsePath=d.parsePath;var h={activeClassName:c.default.string,activeStyle:c.default.object,partiallyActive:c.default.bool},m=function(e){function t(t){var n;n=e.call(this,t)||this,(0,u.default)((0,l.default)((0,l.default)(n)),"defaultGetProps",function(e){var t=e.isPartiallyCurrent,r=e.isCurrent;return(n.props.partiallyActive?t:r)?{className:[n.props.className,n.props.activeClassName].filter(Boolean).join(" "),style:(0,i.default)({},n.props.style,n.props.activeStyle)}:null});var r=!1;return"undefined"!=typeof window&&window.IntersectionObserver&&(r=!0),n.state={IOSupported:r},n.handleRef=n.handleRef.bind((0,l.default)((0,l.default)(n))),n}(0,a.default)(t,e);var n=t.prototype;return n.componentDidUpdate=function(e,t){this.props.to===e.to||this.state.IOSupported||___loader.enqueue((0,d.parsePath)(this.props.to).pathname)},n.componentDidMount=function(){this.state.IOSupported||___loader.enqueue((0,d.parsePath)(this.props.to).pathname)},n.handleRef=function(e){var t,n,r,o=this;this.props.innerRef&&this.props.innerRef.hasOwnProperty("current")?this.props.innerRef.current=e:this.props.innerRef&&this.props.innerRef(e),this.state.IOSupported&&e&&(t=e,n=function(){___loader.enqueue((0,d.parsePath)(o.props.to).pathname)},(r=new window.IntersectionObserver(function(e){e.forEach(function(e){t===e.target&&(e.isIntersecting||e.intersectionRatio>0)&&(r.unobserve(t),r.disconnect(),n())})})).observe(t))},n.render=function(){var e=this,t=this.props,n=t.to,r=t.getProps,a=void 0===r?this.defaultGetProps:r,l=t.onClick,u=t.onMouseEnter,c=(t.activeClassName,t.activeStyle,t.innerRef,t.partiallyActive,t.state),h=t.replace,m=(0,o.default)(t,["to","getProps","onClick","onMouseEnter","activeClassName","activeStyle","innerRef","partiallyActive","state","replace"]);var v=p(n);return 
s.default.createElement(f.Link,(0,i.default)({to:v,state:c,getProps:a,innerRef:this.handleRef,onMouseEnter:function(e){u&&u(e),___loader.hovering((0,d.parsePath)(n).pathname)},onClick:function(t){return l&&l(t),0!==t.button||e.props.target||t.defaultPrevented||t.metaKey||t.altKey||t.ctrlKey||t.shiftKey||(t.preventDefault(),y(n,{state:c,replace:h})),!0}},m))},t}(s.default.Component);m.propTypes=(0,i.default)({},h,{onClick:c.default.func,to:c.default.string.isRequired,replace:c.default.bool});var v=s.default.forwardRef(function(e,t){return s.default.createElement(m,(0,i.default)({innerRef:t},e))});t.default=v;var y=function(e,t){window.___navigate(p(e),t)};t.navigate=y;var g=function(e){console.warn('The "push" method is now deprecated and will be removed in Gatsby v3. Please use "navigate" instead.'),window.___push(p(e))};t.push=g;t.replace=function(e){console.warn('The "replace" method is now deprecated and will be removed in Gatsby v3. Please use "navigate" instead.'),window.___replace(p(e))};t.navigateTo=function(e){return console.warn('The "navigateTo" method is now deprecated and will be removed in Gatsby v3. 
Please use "navigate" instead.'),g(e)}},function(e,t,n){var r=n(12);r(r.S+r.F,"Object",{assign:n(85)})},function(e,t){e.exports=function(e){if(void 0===e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return e}},function(e,t,n){var r=n(18),o=n(6),i=o["__core-js_shared__"]||(o["__core-js_shared__"]={});(e.exports=function(e,t){return i[e]||(i[e]=void 0!==t?t:{})})("versions",[]).push({version:r.version,mode:n(37)?"pure":"global",copyright:"© 2019 Denis Pushkarev (zloirock.ru)"})},function(e,t){e.exports=!1},function(e,t){var n=0,r=Math.random();e.exports=function(e){return"Symbol(".concat(void 0===e?"":e,")_",(++n+r).toString(36))}},function(e,t,n){"use strict";var r,o,i=n(80),a=RegExp.prototype.exec,l=String.prototype.replace,u=a,c=(r=/a/,o=/b*/g,a.call(r,"a"),a.call(o,"a"),0!==r.lastIndex||0!==o.lastIndex),s=void 0!==/()??/.exec("")[1];(c||s)&&(u=function(e){var t,n,r,o,u=this;return s&&(n=new RegExp("^"+u.source+"$(?!\\s)",i.call(u))),c&&(t=u.lastIndex),r=a.call(u,e),c&&r&&(u.lastIndex=u.global?r.index+r[0].length:t),s&&r&&r.length>1&&l.call(r[0],n,function(){for(o=1;o<arguments.length-2;o++)void 0===arguments[o]&&(r[o]=void 0)}),r}),e.exports=u},function(e,t,n){var r=n(11),o=n(6).document,i=r(o)&&r(o.createElement);e.exports=function(e){return i?o.createElement(e):{}}},function(e,t,n){"use strict";var r=n(58)(!0);e.exports=function(e,t,n){return t+(n?r(e,t).length:1)}},function(e,t,n){var r=n(86),o=n(60);e.exports=Object.keys||function(e){return r(e,o)}},function(e,t,n){var r=n(44),o=n(22);e.exports=function(e){return r(o(e))}},function(e,t,n){var r=n(14);e.exports=Object("z").propertyIsEnumerable(0)?Object:function(e){return"String"==r(e)?e.split(""):Object(e)}},function(e,t,n){var r=n(36)("keys"),o=n(38);e.exports=function(e){return r[e]||(r[e]=o(e))}},function(e,t,n){"use strict";var 
r,o,i,a,l=n(37),u=n(6),c=n(21),s=n(26),f=n(12),d=n(11),p=n(30),h=n(90),m=n(91),v=n(61),y=n(62).set,g=n(96)(),b=n(64),w=n(97),_=n(98),x=n(99),k=u.TypeError,S=u.process,T=S&&S.versions,E=T&&T.v8||"",P=u.Promise,C="process"==s(S),R=function(){},O=o=b.f,N=!!function(){try{var e=P.resolve(1),t=(e.constructor={})[n(3)("species")]=function(e){e(R,R)};return(C||"function"==typeof PromiseRejectionEvent)&&e.then(R)instanceof t&&0!==E.indexOf("6.6")&&-1===_.indexOf("Chrome/66")}catch(r){}}(),j=function(e){var t;return!(!d(e)||"function"!=typeof(t=e.then))&&t},L=function(e,t){if(!e._n){e._n=!0;var n=e._c;g(function(){for(var r=e._v,o=1==e._s,i=0,a=function(t){var n,i,a,l=o?t.ok:t.fail,u=t.resolve,c=t.reject,s=t.domain;try{l?(o||(2==e._h&&I(e),e._h=1),!0===l?n=r:(s&&s.enter(),n=l(r),s&&(s.exit(),a=!0)),n===t.promise?c(k("Promise-chain cycle")):(i=j(n))?i.call(n,u,c):u(n)):c(r)}catch(f){s&&!a&&s.exit(),c(f)}};n.length>i;)a(n[i++]);e._c=[],e._n=!1,t&&!e._h&&M(e)})}},M=function(e){y.call(u,function(){var t,n,r,o=e._v,i=U(e);if(i&&(t=w(function(){C?S.emit("unhandledRejection",o,e):(n=u.onunhandledrejection)?n({promise:e,reason:o}):(r=u.console)&&r.error&&r.error("Unhandled promise rejection",o)}),e._h=C||U(e)?2:1),e._a=void 0,i&&t.e)throw t.v})},U=function(e){return 1!==e._h&&0===(e._a||e._c).length},I=function(e){y.call(u,function(){var t;C?S.emit("rejectionHandled",e):(t=u.onrejectionhandled)&&t({promise:e,reason:e._v})})},F=function(e){var t=this;t._d||(t._d=!0,(t=t._w||t)._v=e,t._s=2,t._a||(t._a=t._c.slice()),L(t,!0))},A=function(e){var t,n=this;if(!n._d){n._d=!0,n=n._w||n;try{if(n===e)throw k("Promise can't be resolved itself");(t=j(e))?g(function(){var r={_w:n,_d:!1};try{t.call(e,c(A,r,1),c(F,r,1))}catch(o){F.call(r,o)}}):(n._v=e,n._s=1,L(n,!1))}catch(r){F.call({_w:n,_d:!1},r)}}};N||(P=function(e){h(this,P,"Promise","_h"),p(e),r.call(this);try{e(c(A,this,1),c(F,this,1))}catch(t){F.call(this,t)}},(r=function(e){this._c=[],this._a=void 0,this._s=0,this._d=!1,this._v=void 
0,this._h=0,this._n=!1}).prototype=n(100)(P.prototype,{then:function(e,t){var n=O(v(this,P));return n.ok="function"!=typeof e||e,n.fail="function"==typeof t&&t,n.domain=C?S.domain:void 0,this._c.push(n),this._a&&this._a.push(n),this._s&&L(this,!1),n.promise},catch:function(e){return this.then(void 0,e)}}),i=function(){var e=new r;this.promise=e,this.resolve=c(A,e,1),this.reject=c(F,e,1)},b.f=O=function(e){return e===P||e===a?new i(e):o(e)}),f(f.G+f.W+f.F*!N,{Promise:P}),n(47)(P,"Promise"),n(101)("Promise"),a=n(18).Promise,f(f.S+f.F*!N,"Promise",{reject:function(e){var t=O(this);return(0,t.reject)(e),t.promise}}),f(f.S+f.F*(l||!N),"Promise",{resolve:function(e){return x(l&&this===a?P:this,e)}}),f(f.S+f.F*!(N&&n(102)(function(e){P.all(e).catch(R)})),"Promise",{all:function(e){var t=this,n=O(t),r=n.resolve,o=n.reject,i=w(function(){var n=[],i=0,a=1;m(e,!1,function(e){var l=i++,u=!1;n.push(void 0),a++,t.resolve(e).then(function(e){u||(u=!0,n[l]=e,--a||r(n))},o)}),--a||r(n)});return i.e&&o(i.v),n.promise},race:function(e){var t=this,n=O(t),r=n.reject,o=w(function(){m(e,!1,function(e){t.resolve(e).then(n.resolve,r)})});return o.e&&r(o.v),n.promise}})},function(e,t,n){var r=n(28).f,o=n(29),i=n(3)("toStringTag");e.exports=function(e,t,n){e&&!o(e=n?e:e.prototype,i)&&r(e,i,{configurable:!0,value:t})}},function(e,t,n){"use strict";var r=n(26),o={};o[n(3)("toStringTag")]="z",o+""!="[object z]"&&n(15)(Object.prototype,"toString",function(){return"[object "+r(this)+"]"},!0)},function(e,t,n){var r=n(3)("unscopables"),o=Array.prototype;null==o[r]&&n(13)(o,r,{}),e.exports=function(e){o[r][e]=!0}},function(e,t,n){"use strict";var r=n(5),o=n(32),i=n(16),a=n(31),l=n(41),u=n(25),c=Math.max,s=Math.min,f=Math.floor,d=/\$([$&`']|\d\d?|<[^>]*>)/g,p=/\$([$&`']|\d\d?)/g;n(27)("replace",2,function(e,t,n,h){return[function(r,o){var i=e(this),a=null==r?void 0:r[t];return void 0!==a?a.call(r,i,o):n.call(String(i),r,o)},function(e,t){var o=h(n,e,this,t);if(o.done)return o.value;var 
f=r(e),d=String(this),p="function"==typeof t;p||(t=String(t));var v=f.global;if(v){var y=f.unicode;f.lastIndex=0}for(var g=[];;){var b=u(f,d);if(null===b)break;if(g.push(b),!v)break;""===String(b[0])&&(f.lastIndex=l(d,i(f.lastIndex),y))}for(var w,_="",x=0,k=0;k<g.length;k++){b=g[k];for(var S=String(b[0]),T=c(s(a(b.index),d.length),0),E=[],P=1;P<b.length;P++)E.push(void 0===(w=b[P])?w:String(w));var C=b.groups;if(p){var R=[S].concat(E,T,d);void 0!==C&&R.push(C);var O=String(t.apply(void 0,R))}else O=m(S,d,T,E,C,t);T>=x&&(_+=d.slice(x,T)+O,x=T+S.length)}return _+d.slice(x)}];function m(e,t,r,i,a,l){var u=r+e.length,c=i.length,s=p;return void 0!==a&&(a=o(a),s=d),n.call(l,s,function(n,o){var l;switch(o.charAt(0)){case"$":return"$";case"&":return e;case"`":return t.slice(0,r);case"'":return t.slice(u);case"<":l=a[o.slice(1,-1)];break;default:var s=+o;if(0===s)return n;if(s>c){var d=f(s/10);return 0===d?n:d<=c?void 0===i[d-1]?o.charAt(1):i[d-1]+o.charAt(1):n}l=i[s-1]}return void 0===l?"":l})}})},function(e,t){e.exports=function(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}},function(e,t,n){"use strict";t.__esModule=!0,t.default=void 0;var r=!("undefined"==typeof window||!window.document||!window.document.createElement);t.default=r,e.exports=t.default},function(e,t,n){"use strict";!function e(){if("undefined"!=typeof __REACT_DEVTOOLS_GLOBAL_HOOK__&&"function"==typeof __REACT_DEVTOOLS_GLOBAL_HOOK__.checkDCE)try{__REACT_DEVTOOLS_GLOBAL_HOOK__.checkDCE(e)}catch(t){console.error(t)}}(),e.exports=n(123)},function(e,t,n){t.components={"component---src-pages-404-js":function(){return Promise.all([n.e(0),n.e(1),n.e(3)]).then(n.bind(null,142))},"component---src-pages-index-js":function(){return Promise.all([n.e(0),n.e(1),n.e(4)]).then(n.bind(null,145))},"component---src-pages-page-2-js":function(){return Promise.all([n.e(0),n.e(1),n.e(5)]).then(n.bind(null,143))}},t.data=function(){return 
n.e(6).then(n.t.bind(null,144,3))}},function(e,t,n){"use strict";n(34);var r=n(7),o=n.n(r),i=n(0),a=n.n(i),l=n(4),u=n.n(l),c=n(2),s=n(1),f=function(e){function t(){return e.apply(this,arguments)||this}return o()(t,e),t.prototype.render=function(){var e=Object.assign({},this.props,{pathContext:this.props.pageContext}),t=Object(s.apiRunner)("replaceComponentRenderer",{props:this.props,loader:c.publicLoader})[0]||Object(i.createElement)(this.props.pageResources.component,Object.assign({},e,{key:this.props.pageResources.page.path}));return Object(s.apiRunner)("wrapPageElement",{element:t,props:e},t,function(t){return{element:t.result,props:e}}).pop()},t}(a.a.Component);f.propTypes={location:u.a.object.isRequired,pageResources:u.a.object.isRequired,data:u.a.object,pageContext:u.a.object.isRequired},t.a=f},function(e,t,n){"use strict";var r=Object.getOwnPropertySymbols,o=Object.prototype.hasOwnProperty,i=Object.prototype.propertyIsEnumerable;e.exports=function(){try{if(!Object.assign)return!1;var e=new String("abc");if(e[5]="de","5"===Object.getOwnPropertyNames(e)[0])return!1;for(var t={},n=0;n<10;n++)t["_"+String.fromCharCode(n)]=n;if("0123456789"!==Object.getOwnPropertyNames(t).map(function(e){return t[e]}).join(""))return!1;var r={};return"abcdefghijklmnopqrst".split("").forEach(function(e){r[e]=e}),"abcdefghijklmnopqrst"===Object.keys(Object.assign({},r)).join("")}catch(o){return!1}}()?Object.assign:function(e,t){for(var n,a,l=function(e){if(null==e)throw new TypeError("Object.assign cannot be called with null or undefined");return Object(e)}(e),u=1;u<arguments.length;u++){for(var c in n=Object(arguments[u]))o.call(n,c)&&(l[c]=n[c]);if(r){a=r(n);for(var s=0;s<a.length;s++)i.call(n,a[s])&&(l[a[s]]=n[a[s]])}}return l}},function(e,t){e.exports=function(e,t){return{enumerable:!(1&e),configurable:!(2&e),writable:!(4&e),value:t}}},function(e,t,n){var r=n(31),o=n(22);e.exports=function(e){return function(t,n){var i,a,l=String(o(t)),u=r(n),c=l.length;return 
u<0||u>=c?e?"":void 0:(i=l.charCodeAt(u))<55296||i>56319||u+1===c||(a=l.charCodeAt(u+1))<56320||a>57343?e?l.charAt(u):i:e?l.slice(u,u+2):a-56320+(i-55296<<10)+65536}}},function(e,t,n){var r=n(43),o=n(16),i=n(87);e.exports=function(e){return function(t,n,a){var l,u=r(t),c=o(u.length),s=i(a,c);if(e&&n!=n){for(;c>s;)if((l=u[s++])!=l)return!0}else for(;c>s;s++)if((e||s in u)&&u[s]===n)return e||s||0;return!e&&-1}}},function(e,t){e.exports="constructor,hasOwnProperty,isPrototypeOf,propertyIsEnumerable,toLocaleString,toString,valueOf".split(",")},function(e,t,n){var r=n(5),o=n(30),i=n(3)("species");e.exports=function(e,t){var n,a=r(e).constructor;return void 0===a||null==(n=r(a)[i])?t:o(n)}},function(e,t,n){var r,o,i,a=n(21),l=n(95),u=n(63),c=n(40),s=n(6),f=s.process,d=s.setImmediate,p=s.clearImmediate,h=s.MessageChannel,m=s.Dispatch,v=0,y={},g=function(){var e=+this;if(y.hasOwnProperty(e)){var t=y[e];delete y[e],t()}},b=function(e){g.call(e.data)};d&&p||(d=function(e){for(var t=[],n=1;arguments.length>n;)t.push(arguments[n++]);return y[++v]=function(){l("function"==typeof e?e:Function(e),t)},r(v),v},p=function(e){delete y[e]},"process"==n(14)(f)?r=function(e){f.nextTick(a(g,e,1))}:m&&m.now?r=function(e){m.now(a(g,e,1))}:h?(i=(o=new h).port2,o.port1.onmessage=b,r=a(i.postMessage,i,1)):s.addEventListener&&"function"==typeof postMessage&&!s.importScripts?(r=function(e){s.postMessage(e+"","*")},s.addEventListener("message",b,!1)):r="onreadystatechange"in c("script")?function(e){u.appendChild(c("script")).onreadystatechange=function(){u.removeChild(this),g.call(e)}}:function(e){setTimeout(a(g,e,1),0)}),e.exports={set:d,clear:p}},function(e,t,n){var r=n(6).document;e.exports=r&&r.documentElement},function(e,t,n){"use strict";var r=n(30);function o(e){var t,n;this.promise=new e(function(e,r){if(void 0!==t||void 0!==n)throw TypeError("Bad Promise constructor");t=e,n=r}),this.resolve=r(t),this.reject=r(n)}e.exports.f=function(e){return new o(e)}},function(e,t,n){var 
r=n(11),o=n(14),i=n(3)("match");e.exports=function(e){var t;return r(e)&&(void 0!==(t=e[i])?!!t:"RegExp"==o(e))}},function(e,t,n){"use strict";var r=n(49),o=n(115),i=n(23),a=n(43);e.exports=n(67)(Array,"Array",function(e,t){this._t=a(e),this._i=0,this._k=t},function(){var e=this._t,t=this._k,n=this._i++;return!e||n>=e.length?(this._t=void 0,o(1)):o(0,"keys"==t?n:"values"==t?e[n]:[n,e[n]])},"values"),i.Arguments=i.Array,r("keys"),r("values"),r("entries")},function(e,t,n){"use strict";var r=n(37),o=n(12),i=n(15),a=n(13),l=n(23),u=n(116),c=n(47),s=n(119),f=n(3)("iterator"),d=!([].keys&&"next"in[].keys()),p=function(){return this};e.exports=function(e,t,n,h,m,v,y){u(n,t,h);var g,b,w,_=function(e){if(!d&&e in T)return T[e];switch(e){case"keys":case"values":return function(){return new n(this,e)}}return function(){return new n(this,e)}},x=t+" Iterator",k="values"==m,S=!1,T=e.prototype,E=T[f]||T["@@iterator"]||m&&T[m],P=E||_(m),C=m?k?_("entries"):P:void 0,R="Array"==t&&T.entries||E;if(R&&(w=s(R.call(new e)))!==Object.prototype&&w.next&&(c(w,x,!0),r||"function"==typeof w[f]||a(w,f,p)),k&&E&&"values"!==E.name&&(S=!0,P=function(){return E.call(this)}),r&&!y||!d&&!S&&T[f]||a(T,f,P),l[t]=P,l[x]=p,m)if(g={values:k?P:_("values"),keys:v?P:_("keys"),entries:C},y)for(b in g)b in T||i(T,b,g[b]);else o(o.P+o.F*(d||S),t,g);return g}},function(e,t,n){"use strict";e.exports=function(){}},function(e,t,n){"use strict";t.__esModule=!0,t.default=function(e){return e===e.window?e:9===e.nodeType&&(e.defaultView||e.parentWindow)},e.exports=t.default},function(e,t){t.polyfill=function(e){return e}},function(e,t,n){"use strict";var r=n(8),o=r(n(128)),i=r(n(138));t.ScrollContainer=i.default,t.ScrollContext=o.default},function(e,t,n){e.exports=function(){var e=!1;-1!==navigator.appVersion.indexOf("MSIE 10")&&(e=!0);var t,n=[],r="object"==typeof 
document&&document,o=e?r.documentElement.doScroll():r.documentElement.doScroll,i=r&&(o?/^loaded|^c/:/^loaded|^i|^c/).test(r.readyState);return!i&&r&&r.addEventListener("DOMContentLoaded",t=function(){for(r.removeEventListener("DOMContentLoaded",t),i=1;t=n.shift();)t()}),function(e){i?setTimeout(e,0):n.push(e)}}()},function(e){e.exports=[]},function(e,t){var n;n=function(){return this}();try{n=n||new Function("return this")()}catch(r){"object"==typeof window&&(n=window)}e.exports=n},function(e,t){e.exports=function(e,t){if(null==e)return{};var n,r,o={},i=Object.keys(e);for(r=0;r<i.length;r++)n=i[r],t.indexOf(n)>=0||(o[n]=e[n]);return o}},function(e,t){function n(){return e.exports=n=Object.assign||function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var r in n)Object.prototype.hasOwnProperty.call(n,r)&&(e[r]=n[r])}return e},n.apply(this,arguments)}e.exports=n},function(e,t,n){"use strict";var r=n(5),o=n(78),i=n(25);n(27)("search",1,function(e,t,n,a){return[function(n){var r=e(this),o=null==n?void 0:n[t];return void 0!==o?o.call(n,r):new RegExp(n)[t](String(r))},function(e){var t=a(n,e,this);if(t.done)return t.value;var l=r(e),u=String(this),c=l.lastIndex;o(c,0)||(l.lastIndex=0);var s=i(l,u);return o(l.lastIndex,c)||(l.lastIndex=c),null===s?-1:s.index}]})},function(e,t){e.exports=Object.is||function(e,t){return e===t?0!==e||1/e==1/t:e!=e&&t!=t}},function(e,t,n){"use strict";var r=n(39);n(12)({target:"RegExp",proto:!0,forced:r!==/./.exec},{exec:r})},function(e,t,n){"use strict";var r=n(5);e.exports=function(){var e=r(this),t="";return e.global&&(t+="g"),e.ignoreCase&&(t+="i"),e.multiline&&(t+="m"),e.unicode&&(t+="u"),e.sticky&&(t+="y"),t}},function(e,t,n){e.exports=!n(19)&&!n(20)(function(){return 7!=Object.defineProperty(n(40)("div"),"a",{get:function(){return 7}}).a})},function(e,t,n){var r=n(11);e.exports=function(e,t){if(!r(e))return e;var n,o;if(t&&"function"==typeof(n=e.toString)&&!r(o=n.call(e)))return 
o;if("function"==typeof(n=e.valueOf)&&!r(o=n.call(e)))return o;if(!t&&"function"==typeof(n=e.toString)&&!r(o=n.call(e)))return o;throw TypeError("Can't convert object to primitive value")}},function(e,t,n){e.exports=n(36)("native-function-to-string",Function.toString)},function(e,t,n){"use strict";var r=n(5),o=n(16),i=n(41),a=n(25);n(27)("match",1,function(e,t,n,l){return[function(n){var r=e(this),o=null==n?void 0:n[t];return void 0!==o?o.call(n,r):new RegExp(n)[t](String(r))},function(e){var t=l(n,e,this);if(t.done)return t.value;var u=r(e),c=String(this);if(!u.global)return a(u,c);var s=u.unicode;u.lastIndex=0;for(var f,d=[],p=0;null!==(f=a(u,c));){var h=String(f[0]);d[p]=h,""===h&&(u.lastIndex=i(c,o(u.lastIndex),s)),p++}return 0===p?null:d}]})},function(e,t,n){"use strict";var r=n(42),o=n(88),i=n(89),a=n(32),l=n(44),u=Object.assign;e.exports=!u||n(20)(function(){var e={},t={},n=Symbol(),r="abcdefghijklmnopqrst";return e[n]=7,r.split("").forEach(function(e){t[e]=e}),7!=u({},e)[n]||Object.keys(u({},t)).join("")!=r})?function(e,t){for(var n=a(e),u=arguments.length,c=1,s=o.f,f=i.f;u>c;)for(var d,p=l(arguments[c++]),h=s?r(p).concat(s(p)):r(p),m=h.length,v=0;m>v;)f.call(p,d=h[v++])&&(n[d]=p[d]);return n}:u},function(e,t,n){var r=n(29),o=n(43),i=n(59)(!1),a=n(45)("IE_PROTO");e.exports=function(e,t){var n,l=o(e),u=0,c=[];for(n in l)n!=a&&r(l,n)&&c.push(n);for(;t.length>u;)r(l,n=t[u++])&&(~i(c,n)||c.push(n));return c}},function(e,t,n){var r=n(31),o=Math.max,i=Math.min;e.exports=function(e,t){return(e=r(e))<0?o(e+t,0):i(e,t)}},function(e,t){t.f=Object.getOwnPropertySymbols},function(e,t){t.f={}.propertyIsEnumerable},function(e,t){e.exports=function(e,t,n,r){if(!(e instanceof t)||void 0!==r&&r in e)throw TypeError(n+": incorrect invocation!");return e}},function(e,t,n){var r=n(21),o=n(92),i=n(93),a=n(5),l=n(16),u=n(94),c={},s={};(t=e.exports=function(e,t,n,f,d){var p,h,m,v,y=d?function(){return e}:u(e),g=r(n,f,t?2:1),b=0;if("function"!=typeof y)throw TypeError(e+" is not 
iterable!");if(i(y)){for(p=l(e.length);p>b;b++)if((v=t?g(a(h=e[b])[0],h[1]):g(e[b]))===c||v===s)return v}else for(m=y.call(e);!(h=m.next()).done;)if((v=o(m,g,h.value,t))===c||v===s)return v}).BREAK=c,t.RETURN=s},function(e,t,n){var r=n(5);e.exports=function(e,t,n,o){try{return o?t(r(n)[0],n[1]):t(n)}catch(a){var i=e.return;throw void 0!==i&&r(i.call(e)),a}}},function(e,t,n){var r=n(23),o=n(3)("iterator"),i=Array.prototype;e.exports=function(e){return void 0!==e&&(r.Array===e||i[o]===e)}},function(e,t,n){var r=n(26),o=n(3)("iterator"),i=n(23);e.exports=n(18).getIteratorMethod=function(e){if(null!=e)return e[o]||e["@@iterator"]||i[r(e)]}},function(e,t){e.exports=function(e,t,n){var r=void 0===n;switch(t.length){case 0:return r?e():e.call(n);case 1:return r?e(t[0]):e.call(n,t[0]);case 2:return r?e(t[0],t[1]):e.call(n,t[0],t[1]);case 3:return r?e(t[0],t[1],t[2]):e.call(n,t[0],t[1],t[2]);case 4:return r?e(t[0],t[1],t[2],t[3]):e.call(n,t[0],t[1],t[2],t[3])}return e.apply(n,t)}},function(e,t,n){var r=n(6),o=n(62).set,i=r.MutationObserver||r.WebKitMutationObserver,a=r.process,l=r.Promise,u="process"==n(14)(a);e.exports=function(){var e,t,n,c=function(){var r,o;for(u&&(r=a.domain)&&r.exit();e;){o=e.fn,e=e.next;try{o()}catch(i){throw e?n():t=void 0,i}}t=void 0,r&&r.enter()};if(u)n=function(){a.nextTick(c)};else if(!i||r.navigator&&r.navigator.standalone)if(l&&l.resolve){var s=l.resolve(void 0);n=function(){s.then(c)}}else n=function(){o.call(r,c)};else{var f=!0,d=document.createTextNode("");new i(c).observe(d,{characterData:!0}),n=function(){d.data=f=!f}}return function(r){var o={fn:r,next:void 0};t&&(t.next=o),e||(e=o,n()),t=o}}},function(e,t){e.exports=function(e){try{return{e:!1,v:e()}}catch(t){return{e:!0,v:t}}}},function(e,t,n){var r=n(6).navigator;e.exports=r&&r.userAgent||""},function(e,t,n){var r=n(5),o=n(11),i=n(64);e.exports=function(e,t){if(r(e),o(t)&&t.constructor===e)return t;var n=i.f(e);return(0,n.resolve)(t),n.promise}},function(e,t,n){var 
r=n(15);e.exports=function(e,t,n){for(var o in t)r(e,o,t[o],n);return e}},function(e,t,n){"use strict";var r=n(6),o=n(28),i=n(19),a=n(3)("species");e.exports=function(e){var t=r[e];i&&t&&!t[a]&&o.f(t,a,{configurable:!0,get:function(){return this}})}},function(e,t,n){var r=n(3)("iterator"),o=!1;try{var i=[7][r]();i.return=function(){o=!0},Array.from(i,function(){throw 2})}catch(a){}e.exports=function(e,t){if(!t&&!o)return!1;var n=!1;try{var i=[7],l=i[r]();l.next=function(){return{done:n=!0}},i[r]=function(){return l},e(i)}catch(a){}return n}},function(e,t,n){e.exports=[{plugin:n(104),options:{plugins:[]}}]},function(e,t){},function(e,t,n){"use strict";var r=n(12),o=n(59)(!0);r(r.P,"Array",{includes:function(e){return o(this,e,arguments.length>1?arguments[1]:void 0)}}),n(49)("includes")},function(e,t,n){"use strict";var r=n(12),o=n(107);r(r.P+r.F*n(108)("includes"),"String",{includes:function(e){return!!~o(this,e,"includes").indexOf(e,arguments.length>1?arguments[1]:void 0)}})},function(e,t,n){var r=n(65),o=n(22);e.exports=function(e,t,n){if(r(t))throw TypeError("String#"+n+" doesn't accept regex!");return String(o(e))}},function(e,t,n){var r=n(3)("match");e.exports=function(e){var t=/./;try{"/./"[e](t)}catch(n){try{return t[r]=!1,!"/./"[e](t)}catch(o){}}return!0}},function(e,t,n){"use strict";var r=n(12),o=n(110)(5),i=!0;"find"in[]&&Array(1).find(function(){i=!1}),r(r.P+r.F*i,"Array",{find:function(e){return o(this,e,arguments.length>1?arguments[1]:void 0)}}),n(49)("find")},function(e,t,n){var r=n(21),o=n(44),i=n(32),a=n(16),l=n(111);e.exports=function(e,t){var n=1==e,u=2==e,c=3==e,s=4==e,f=6==e,d=5==e||f,p=t||l;return function(t,l,h){for(var m,v,y=i(t),g=o(y),b=r(l,h,3),w=a(g.length),_=0,x=n?p(t,w):u?p(t,0):void 0;w>_;_++)if((d||_ in g)&&(v=b(m=g[_],_,y),e))if(n)x[_]=v;else if(v)switch(e){case 3:return!0;case 5:return m;case 6:return _;case 2:x.push(m)}else if(s)return!1;return f?-1:c||s?s:x}}},function(e,t,n){var r=n(112);e.exports=function(e,t){return 
new(r(e))(t)}},function(e,t,n){var r=n(11),o=n(113),i=n(3)("species");e.exports=function(e){var t;return o(e)&&("function"!=typeof(t=e.constructor)||t!==Array&&!o(t.prototype)||(t=void 0),r(t)&&null===(t=t[i])&&(t=void 0)),void 0===t?Array:t}},function(e,t,n){var r=n(14);e.exports=Array.isArray||function(e){return"Array"==r(e)}},function(e,t,n){for(var r=n(66),o=n(42),i=n(15),a=n(6),l=n(13),u=n(23),c=n(3),s=c("iterator"),f=c("toStringTag"),d=u.Array,p={CSSRuleList:!0,CSSStyleDeclaration:!1,CSSValueList:!1,ClientRectList:!1,DOMRectList:!1,DOMStringList:!1,DOMTokenList:!0,DataTransferItemList:!1,FileList:!1,HTMLAllCollection:!1,HTMLCollection:!1,HTMLFormElement:!1,HTMLSelectElement:!1,MediaList:!0,MimeTypeArray:!1,NamedNodeMap:!1,NodeList:!0,PaintRequestList:!1,Plugin:!1,PluginArray:!1,SVGLengthList:!1,SVGNumberList:!1,SVGPathSegList:!1,SVGPointList:!1,SVGStringList:!1,SVGTransformList:!1,SourceBufferList:!1,StyleSheetList:!0,TextTrackCueList:!1,TextTrackList:!1,TouchList:!1},h=o(p),m=0;m<h.length;m++){var v,y=h[m],g=p[y],b=a[y],w=b&&b.prototype;if(w&&(w[s]||l(w,s,d),w[f]||l(w,f,y),u[y]=d,g))for(v in r)w[v]||i(w,v,r[v],!0)}},function(e,t){e.exports=function(e,t){return{value:t,done:!!e}}},function(e,t,n){"use strict";var r=n(117),o=n(57),i=n(47),a={};n(13)(a,n(3)("iterator"),function(){return this}),e.exports=function(e,t,n){e.prototype=r(a,{next:o(1,n)}),i(e,t+" Iterator")}},function(e,t,n){var r=n(5),o=n(118),i=n(60),a=n(45)("IE_PROTO"),l=function(){},u=function(){var e,t=n(40)("iframe"),r=i.length;for(t.style.display="none",n(63).appendChild(t),t.src="javascript:",(e=t.contentWindow.document).open(),e.write("<script>document.F=Object<\/script>"),e.close(),u=e.F;r--;)delete u.prototype[i[r]];return u()};e.exports=Object.create||function(e,t){var n;return null!==e?(l.prototype=r(e),n=new l,l.prototype=null,n[a]=e):n=u(),void 0===t?n:o(n,t)}},function(e,t,n){var r=n(28),o=n(5),i=n(42);e.exports=n(19)?Object.defineProperties:function(e,t){o(e);for(var 
n,a=i(t),l=a.length,u=0;l>u;)r.f(e,n=a[u++],t[n]);return e}},function(e,t,n){var r=n(29),o=n(32),i=n(45)("IE_PROTO"),a=Object.prototype;e.exports=Object.getPrototypeOf||function(e){return e=o(e),r(e,i)?e[i]:"function"==typeof e.constructor&&e instanceof e.constructor?e.constructor.prototype:e instanceof Object?a:null}},function(e,t,n){"use strict";var r=n(58)(!0);n(67)(String,"String",function(e){this._t=String(e),this._i=0},function(){var e,t=this._t,n=this._i;return n>=t.length?{value:void 0,done:!0}:(e=r(t,n),this._i+=e.length,{value:e,done:!1})})},function(e,t,n){"use strict";var r=n(65),o=n(5),i=n(61),a=n(41),l=n(16),u=n(25),c=n(39),s=n(20),f=Math.min,d=[].push,p=!s(function(){RegExp(4294967295,"y")});n(27)("split",2,function(e,t,n,s){var h;return h="c"=="abbc".split(/(b)*/)[1]||4!="test".split(/(?:)/,-1).length||2!="ab".split(/(?:ab)*/).length||4!=".".split(/(.?)(.?)/).length||".".split(/()()/).length>1||"".split(/.?/).length?function(e,t){var o=String(this);if(void 0===e&&0===t)return[];if(!r(e))return n.call(o,e,t);for(var i,a,l,u=[],s=(e.ignoreCase?"i":"")+(e.multiline?"m":"")+(e.unicode?"u":"")+(e.sticky?"y":""),f=0,p=void 0===t?4294967295:t>>>0,h=new RegExp(e.source,s+"g");(i=c.call(h,o))&&!((a=h.lastIndex)>f&&(u.push(o.slice(f,i.index)),i.length>1&&i.index<o.length&&d.apply(u,i.slice(1)),l=i[0].length,f=a,u.length>=p));)h.lastIndex===i.index&&h.lastIndex++;return f===o.length?!l&&h.test("")||u.push(""):u.push(o.slice(f)),u.length>p?u.slice(0,p):u}:"0".split(void 0,0).length?function(e,t){return void 0===e&&0===t?[]:n.call(this,e,t)}:n,[function(n,r){var o=e(this),i=null==n?void 0:n[t];return void 0!==i?i.call(n,o,r):h.call(String(o),n,r)},function(e,t){var r=s(h,e,this,t,h!==n);if(r.done)return r.value;var c=o(e),d=String(this),m=i(c,RegExp),v=c.unicode,y=(c.ignoreCase?"i":"")+(c.multiline?"m":"")+(c.unicode?"u":"")+(p?"y":"g"),g=new m(p?c:"^(?:"+c.source+")",y),b=void 0===t?4294967295:t>>>0;if(0===b)return[];if(0===d.length)return 
null===u(g,d)?[d]:[];for(var w=0,_=0,x=[];_<d.length;){g.lastIndex=p?_:0;var k,S=u(g,p?d:d.slice(_));if(null===S||(k=f(l(g.lastIndex+(p?0:_)),d.length))===w)_=a(d,_,v);else{if(x.push(d.slice(w,_)),x.length===b)return x;for(var T=1;T<=S.length-1;T++)if(x.push(S[T]),x.length===b)return x;_=w=k}}return x.push(d.slice(w)),x}]})},function(e,t,n){"use strict";var r=n(56),o="function"==typeof Symbol&&Symbol.for,i=o?Symbol.for("react.element"):60103,a=o?Symbol.for("react.portal"):60106,l=o?Symbol.for("react.fragment"):60107,u=o?Symbol.for("react.strict_mode"):60108,c=o?Symbol.for("react.profiler"):60114,s=o?Symbol.for("react.provider"):60109,f=o?Symbol.for("react.context"):60110,d=o?Symbol.for("react.concurrent_mode"):60111,p=o?Symbol.for("react.forward_ref"):60112,h=o?Symbol.for("react.suspense"):60113,m=o?Symbol.for("react.memo"):60115,v=o?Symbol.for("react.lazy"):60116,y="function"==typeof Symbol&&Symbol.iterator;function g(e){for(var t=arguments.length-1,n="https://reactjs.org/docs/error-decoder.html?invariant="+e,r=0;r<t;r++)n+="&args[]="+encodeURIComponent(arguments[r+1]);!function(e,t,n,r,o,i,a,l){if(!e){if(e=void 0,void 0===t)e=Error("Minified exception occurred; use the non-minified dev environment for the full error message and additional helpful warnings.");else{var u=[n,r,o,i,a,l],c=0;(e=Error(t.replace(/%s/g,function(){return u[c++]}))).name="Invariant Violation"}throw e.framesToPop=1,e}}(!1,"Minified React error #"+e+"; visit %s for the full message or use the non-minified dev environment for full errors and additional helpful warnings. 
",n)}var b={isMounted:function(){return!1},enqueueForceUpdate:function(){},enqueueReplaceState:function(){},enqueueSetState:function(){}},w={};function _(e,t,n){this.props=e,this.context=t,this.refs=w,this.updater=n||b}function x(){}function k(e,t,n){this.props=e,this.context=t,this.refs=w,this.updater=n||b}_.prototype.isReactComponent={},_.prototype.setState=function(e,t){"object"!=typeof e&&"function"!=typeof e&&null!=e&&g("85"),this.updater.enqueueSetState(this,e,t,"setState")},_.prototype.forceUpdate=function(e){this.updater.enqueueForceUpdate(this,e,"forceUpdate")},x.prototype=_.prototype;var S=k.prototype=new x;S.constructor=k,r(S,_.prototype),S.isPureReactComponent=!0;var T={current:null},E={current:null},P=Object.prototype.hasOwnProperty,C={key:!0,ref:!0,__self:!0,__source:!0};function R(e,t,n){var r=void 0,o={},a=null,l=null;if(null!=t)for(r in void 0!==t.ref&&(l=t.ref),void 0!==t.key&&(a=""+t.key),t)P.call(t,r)&&!C.hasOwnProperty(r)&&(o[r]=t[r]);var u=arguments.length-2;if(1===u)o.children=n;else if(1<u){for(var c=Array(u),s=0;s<u;s++)c[s]=arguments[s+2];o.children=c}if(e&&e.defaultProps)for(r in u=e.defaultProps)void 0===o[r]&&(o[r]=u[r]);return{$$typeof:i,type:e,key:a,ref:l,props:o,_owner:E.current}}function O(e){return"object"==typeof e&&null!==e&&e.$$typeof===i}var N=/\/+/g,j=[];function L(e,t,n,r){if(j.length){var o=j.pop();return o.result=e,o.keyPrefix=t,o.func=n,o.context=r,o.count=0,o}return{result:e,keyPrefix:t,func:n,context:r,count:0}}function M(e){e.result=null,e.keyPrefix=null,e.func=null,e.context=null,e.count=0,10>j.length&&j.push(e)}function U(e,t,n){return null==e?0:function e(t,n,r,o){var l=typeof t;"undefined"!==l&&"boolean"!==l||(t=null);var u=!1;if(null===t)u=!0;else switch(l){case"string":case"number":u=!0;break;case"object":switch(t.$$typeof){case i:case a:u=!0}}if(u)return r(o,t,""===n?"."+I(t,0):n),1;if(u=0,n=""===n?".":n+":",Array.isArray(t))for(var c=0;c<t.length;c++){var s=n+I(l=t[c],c);u+=e(l,s,r,o)}else 
if(s=null===t||"object"!=typeof t?null:"function"==typeof(s=y&&t[y]||t["@@iterator"])?s:null,"function"==typeof s)for(t=s.call(t),c=0;!(l=t.next()).done;)u+=e(l=l.value,s=n+I(l,c++),r,o);else"object"===l&&g("31","[object Object]"==(r=""+t)?"object with keys {"+Object.keys(t).join(", ")+"}":r,"");return u}(e,"",t,n)}function I(e,t){return"object"==typeof e&&null!==e&&null!=e.key?function(e){var t={"=":"=0",":":"=2"};return"$"+(""+e).replace(/[=:]/g,function(e){return t[e]})}(e.key):t.toString(36)}function F(e,t){e.func.call(e.context,t,e.count++)}function A(e,t,n){var r=e.result,o=e.keyPrefix;e=e.func.call(e.context,t,e.count++),Array.isArray(e)?D(e,r,n,function(e){return e}):null!=e&&(O(e)&&(e=function(e,t){return{$$typeof:i,type:e.type,key:t,ref:e.ref,props:e.props,_owner:e._owner}}(e,o+(!e.key||t&&t.key===e.key?"":(""+e.key).replace(N,"$&/")+"/")+n)),r.push(e))}function D(e,t,n,r,o){var i="";null!=n&&(i=(""+n).replace(N,"$&/")+"/"),U(e,A,t=L(t,i,r,o)),M(t)}function z(){var e=T.current;return null===e&&g("321"),e}var W={Children:{map:function(e,t,n){if(null==e)return e;var r=[];return D(e,r,null,t,n),r},forEach:function(e,t,n){if(null==e)return e;U(e,F,t=L(null,null,t,n)),M(t)},count:function(e){return U(e,function(){return null},null)},toArray:function(e){var t=[];return D(e,t,null,function(e){return e}),t},only:function(e){return O(e)||g("143"),e}},createRef:function(){return{current:null}},Component:_,PureComponent:k,createContext:function(e,t){return void 0===t&&(t=null),(e={$$typeof:f,_calculateChangedBits:t,_currentValue:e,_currentValue2:e,_threadCount:0,Provider:null,Consumer:null}).Provider={$$typeof:s,_context:e},e.Consumer=e},forwardRef:function(e){return{$$typeof:p,render:e}},lazy:function(e){return{$$typeof:v,_ctor:e,_status:-1,_result:null}},memo:function(e,t){return{$$typeof:m,type:e,compare:void 0===t?null:t}},useCallback:function(e,t){return z().useCallback(e,t)},useContext:function(e,t){return z().useContext(e,t)},useEffect:function(e,t){return 
z().useEffect(e,t)},useImperativeHandle:function(e,t,n){return z().useImperativeHandle(e,t,n)},useDebugValue:function(){},useLayoutEffect:function(e,t){return z().useLayoutEffect(e,t)},useMemo:function(e,t){return z().useMemo(e,t)},useReducer:function(e,t,n){return z().useReducer(e,t,n)},useRef:function(e){return z().useRef(e)},useState:function(e){return z().useState(e)},Fragment:l,StrictMode:u,Suspense:h,createElement:R,cloneElement:function(e,t,n){null==e&&g("267",e);var o=void 0,a=r({},e.props),l=e.key,u=e.ref,c=e._owner;if(null!=t){void 0!==t.ref&&(u=t.ref,c=E.current),void 0!==t.key&&(l=""+t.key);var s=void 0;for(o in e.type&&e.type.defaultProps&&(s=e.type.defaultProps),t)P.call(t,o)&&!C.hasOwnProperty(o)&&(a[o]=void 0===t[o]&&void 0!==s?s[o]:t[o])}if(1===(o=arguments.length-2))a.children=n;else if(1<o){s=Array(o);for(var f=0;f<o;f++)s[f]=arguments[f+2];a.children=s}return{$$typeof:i,type:e.type,key:l,ref:u,props:a,_owner:c}},createFactory:function(e){var t=R.bind(null,e);return t.type=e,t},isValidElement:O,version:"16.8.6",unstable_ConcurrentMode:d,unstable_Profiler:c,__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED:{ReactCurrentDispatcher:T,ReactCurrentOwner:E,assign:r}},B={default:W},H=B&&W||B;e.exports=H.default||H},function(e,t,n){"use strict";var r=n(0),o=n(56),i=n(124);function a(e){for(var t=arguments.length-1,n="https://reactjs.org/docs/error-decoder.html?invariant="+e,r=0;r<t;r++)n+="&args[]="+encodeURIComponent(arguments[r+1]);!function(e,t,n,r,o,i,a,l){if(!e){if(e=void 0,void 0===t)e=Error("Minified exception occurred; use the non-minified dev environment for the full error message and additional helpful warnings.");else{var u=[n,r,o,i,a,l],c=0;(e=Error(t.replace(/%s/g,function(){return u[c++]}))).name="Invariant Violation"}throw e.framesToPop=1,e}}(!1,"Minified React error #"+e+"; visit %s for the full message or use the non-minified dev environment for full errors and additional helpful warnings. 
",n)}r||a("227");var l=!1,u=null,c=!1,s=null,f={onError:function(e){l=!0,u=e}};function d(e,t,n,r,o,i,a,c,s){l=!1,u=null,function(e,t,n,r,o,i,a,l,u){var c=Array.prototype.slice.call(arguments,3);try{t.apply(n,c)}catch(s){this.onError(s)}}.apply(f,arguments)}var p=null,h={};function m(){if(p)for(var e in h){var t=h[e],n=p.indexOf(e);if(-1<n||a("96",e),!y[n])for(var r in t.extractEvents||a("97",e),y[n]=t,n=t.eventTypes){var o=void 0,i=n[r],l=t,u=r;g.hasOwnProperty(u)&&a("99",u),g[u]=i;var c=i.phasedRegistrationNames;if(c){for(o in c)c.hasOwnProperty(o)&&v(c[o],l,u);o=!0}else i.registrationName?(v(i.registrationName,l,u),o=!0):o=!1;o||a("98",r,e)}}}function v(e,t,n){b[e]&&a("100",e),b[e]=t,w[e]=t.eventTypes[n].dependencies}var y=[],g={},b={},w={},_=null,x=null,k=null;function S(e,t,n){var r=e.type||"unknown-event";e.currentTarget=k(n),function(e,t,n,r,o,i,f,p,h){if(d.apply(this,arguments),l){if(l){var m=u;l=!1,u=null}else a("198"),m=void 0;c||(c=!0,s=m)}}(r,t,void 0,e),e.currentTarget=null}function T(e,t){return null==t&&a("30"),null==e?t:Array.isArray(e)?Array.isArray(t)?(e.push.apply(e,t),e):(e.push(t),e):Array.isArray(t)?[e].concat(t):[e,t]}function E(e,t,n){Array.isArray(e)?e.forEach(t,n):e&&t.call(n,e)}var P=null;function C(e){if(e){var t=e._dispatchListeners,n=e._dispatchInstances;if(Array.isArray(t))for(var r=0;r<t.length&&!e.isPropagationStopped();r++)S(e,t[r],n[r]);else t&&S(e,t,n);e._dispatchListeners=null,e._dispatchInstances=null,e.isPersistent()||e.constructor.release(e)}}var R={injectEventPluginOrder:function(e){p&&a("101"),p=Array.prototype.slice.call(e),m()},injectEventPluginsByName:function(e){var t,n=!1;for(t in e)if(e.hasOwnProperty(t)){var r=e[t];h.hasOwnProperty(t)&&h[t]===r||(h[t]&&a("102",t),h[t]=r,n=!0)}n&&m()}};function O(e,t){var n=e.stateNode;if(!n)return null;var r=_(n);if(!r)return 
null;n=r[t];e:switch(t){case"onClick":case"onClickCapture":case"onDoubleClick":case"onDoubleClickCapture":case"onMouseDown":case"onMouseDownCapture":case"onMouseMove":case"onMouseMoveCapture":case"onMouseUp":case"onMouseUpCapture":(r=!r.disabled)||(r=!("button"===(e=e.type)||"input"===e||"select"===e||"textarea"===e)),e=!r;break e;default:e=!1}return e?null:(n&&"function"!=typeof n&&a("231",t,typeof n),n)}function N(e){if(null!==e&&(P=T(P,e)),e=P,P=null,e&&(E(e,C),P&&a("95"),c))throw e=s,c=!1,s=null,e}var j=Math.random().toString(36).slice(2),L="__reactInternalInstance$"+j,M="__reactEventHandlers$"+j;function U(e){if(e[L])return e[L];for(;!e[L];){if(!e.parentNode)return null;e=e.parentNode}return 5===(e=e[L]).tag||6===e.tag?e:null}function I(e){return!(e=e[L])||5!==e.tag&&6!==e.tag?null:e}function F(e){if(5===e.tag||6===e.tag)return e.stateNode;a("33")}function A(e){return e[M]||null}function D(e){do{e=e.return}while(e&&5!==e.tag);return e||null}function z(e,t,n){(t=O(e,n.dispatchConfig.phasedRegistrationNames[t]))&&(n._dispatchListeners=T(n._dispatchListeners,t),n._dispatchInstances=T(n._dispatchInstances,e))}function W(e){if(e&&e.dispatchConfig.phasedRegistrationNames){for(var t=e._targetInst,n=[];t;)n.push(t),t=D(t);for(t=n.length;0<t--;)z(n[t],"captured",e);for(t=0;t<n.length;t++)z(n[t],"bubbled",e)}}function B(e,t,n){e&&n&&n.dispatchConfig.registrationName&&(t=O(e,n.dispatchConfig.registrationName))&&(n._dispatchListeners=T(n._dispatchListeners,t),n._dispatchInstances=T(n._dispatchInstances,e))}function H(e){e&&e.dispatchConfig.registrationName&&B(e._targetInst,null,e)}function V(e){E(e,W)}var $=!("undefined"==typeof window||!window.document||!window.document.createElement);function q(e,t){var n={};return n[e.toLowerCase()]=t.toLowerCase(),n["Webkit"+e]="webkit"+t,n["Moz"+e]="moz"+t,n}var 
K={animationend:q("Animation","AnimationEnd"),animationiteration:q("Animation","AnimationIteration"),animationstart:q("Animation","AnimationStart"),transitionend:q("Transition","TransitionEnd")},Q={},G={};function Y(e){if(Q[e])return Q[e];if(!K[e])return e;var t,n=K[e];for(t in n)if(n.hasOwnProperty(t)&&t in G)return Q[e]=n[t];return e}$&&(G=document.createElement("div").style,"AnimationEvent"in window||(delete K.animationend.animation,delete K.animationiteration.animation,delete K.animationstart.animation),"TransitionEvent"in window||delete K.transitionend.transition);var X=Y("animationend"),J=Y("animationiteration"),Z=Y("animationstart"),ee=Y("transitionend"),te="abort canplay canplaythrough durationchange emptied encrypted ended error loadeddata loadedmetadata loadstart pause play playing progress ratechange seeked seeking stalled suspend timeupdate volumechange waiting".split(" "),ne=null,re=null,oe=null;function ie(){if(oe)return oe;var e,t,n=re,r=n.length,o="value"in ne?ne.value:ne.textContent,i=o.length;for(e=0;e<r&&n[e]===o[e];e++);var a=r-e;for(t=1;t<=a&&n[r-t]===o[i-t];t++);return oe=o.slice(e,1<t?1-t:void 0)}function ae(){return!0}function le(){return!1}function ue(e,t,n,r){for(var o in this.dispatchConfig=e,this._targetInst=t,this.nativeEvent=n,e=this.constructor.Interface)e.hasOwnProperty(o)&&((t=e[o])?this[o]=t(n):"target"===o?this.target=r:this[o]=n[o]);return this.isDefaultPrevented=(null!=n.defaultPrevented?n.defaultPrevented:!1===n.returnValue)?ae:le,this.isPropagationStopped=le,this}function ce(e,t,n,r){if(this.eventPool.length){var o=this.eventPool.pop();return this.call(o,e,t,n,r),o}return new this(e,t,n,r)}function se(e){e instanceof this||a("279"),e.destructor(),10>this.eventPool.length&&this.eventPool.push(e)}function fe(e){e.eventPool=[],e.getPooled=ce,e.release=se}o(ue.prototype,{preventDefault:function(){this.defaultPrevented=!0;var e=this.nativeEvent;e&&(e.preventDefault?e.preventDefault():"unknown"!=typeof 
e.returnValue&&(e.returnValue=!1),this.isDefaultPrevented=ae)},stopPropagation:function(){var e=this.nativeEvent;e&&(e.stopPropagation?e.stopPropagation():"unknown"!=typeof e.cancelBubble&&(e.cancelBubble=!0),this.isPropagationStopped=ae)},persist:function(){this.isPersistent=ae},isPersistent:le,destructor:function(){var e,t=this.constructor.Interface;for(e in t)this[e]=null;this.nativeEvent=this._targetInst=this.dispatchConfig=null,this.isPropagationStopped=this.isDefaultPrevented=le,this._dispatchInstances=this._dispatchListeners=null}}),ue.Interface={type:null,target:null,currentTarget:function(){return null},eventPhase:null,bubbles:null,cancelable:null,timeStamp:function(e){return e.timeStamp||Date.now()},defaultPrevented:null,isTrusted:null},ue.extend=function(e){function t(){}function n(){return r.apply(this,arguments)}var r=this;t.prototype=r.prototype;var i=new t;return o(i,n.prototype),n.prototype=i,n.prototype.constructor=n,n.Interface=o({},r.Interface,e),n.extend=r.extend,fe(n),n},fe(ue);var de=ue.extend({data:null}),pe=ue.extend({data:null}),he=[9,13,27,32],me=$&&"CompositionEvent"in window,ve=null;$&&"documentMode"in document&&(ve=document.documentMode);var ye=$&&"TextEvent"in window&&!ve,ge=$&&(!me||ve&&8<ve&&11>=ve),be=String.fromCharCode(32),we={beforeInput:{phasedRegistrationNames:{bubbled:"onBeforeInput",captured:"onBeforeInputCapture"},dependencies:["compositionend","keypress","textInput","paste"]},compositionEnd:{phasedRegistrationNames:{bubbled:"onCompositionEnd",captured:"onCompositionEndCapture"},dependencies:"blur compositionend keydown keypress keyup mousedown".split(" ")},compositionStart:{phasedRegistrationNames:{bubbled:"onCompositionStart",captured:"onCompositionStartCapture"},dependencies:"blur compositionstart keydown keypress keyup mousedown".split(" ")},compositionUpdate:{phasedRegistrationNames:{bubbled:"onCompositionUpdate",captured:"onCompositionUpdateCapture"},dependencies:"blur compositionupdate keydown keypress keyup 
mousedown".split(" ")}},_e=!1;function xe(e,t){switch(e){case"keyup":return-1!==he.indexOf(t.keyCode);case"keydown":return 229!==t.keyCode;case"keypress":case"mousedown":case"blur":return!0;default:return!1}}function ke(e){return"object"==typeof(e=e.detail)&&"data"in e?e.data:null}var Se=!1;var Te={eventTypes:we,extractEvents:function(e,t,n,r){var o=void 0,i=void 0;if(me)e:{switch(e){case"compositionstart":o=we.compositionStart;break e;case"compositionend":o=we.compositionEnd;break e;case"compositionupdate":o=we.compositionUpdate;break e}o=void 0}else Se?xe(e,n)&&(o=we.compositionEnd):"keydown"===e&&229===n.keyCode&&(o=we.compositionStart);return o?(ge&&"ko"!==n.locale&&(Se||o!==we.compositionStart?o===we.compositionEnd&&Se&&(i=ie()):(re="value"in(ne=r)?ne.value:ne.textContent,Se=!0)),o=de.getPooled(o,t,n,r),i?o.data=i:null!==(i=ke(n))&&(o.data=i),V(o),i=o):i=null,(e=ye?function(e,t){switch(e){case"compositionend":return ke(t);case"keypress":return 32!==t.which?null:(_e=!0,be);case"textInput":return(e=t.data)===be&&_e?null:e;default:return null}}(e,n):function(e,t){if(Se)return"compositionend"===e||!me&&xe(e,t)?(e=ie(),oe=re=ne=null,Se=!1,e):null;switch(e){case"paste":return null;case"keypress":if(!(t.ctrlKey||t.altKey||t.metaKey)||t.ctrlKey&&t.altKey){if(t.char&&1<t.char.length)return t.char;if(t.which)return String.fromCharCode(t.which)}return null;case"compositionend":return ge&&"ko"!==t.locale?null:t.data;default:return null}}(e,n))?((t=pe.getPooled(we.beforeInput,t,n,r)).data=e,V(t)):t=null,null===i?t:null===t?i:[i,t]}},Ee=null,Pe=null,Ce=null;function Re(e){if(e=x(e)){"function"!=typeof Ee&&a("280");var t=_(e.stateNode);Ee(e.stateNode,e.type,t)}}function Oe(e){Pe?Ce?Ce.push(e):Ce=[e]:Pe=e}function Ne(){if(Pe){var e=Pe,t=Ce;if(Ce=Pe=null,Re(e),t)for(e=0;e<t.length;e++)Re(t[e])}}function je(e,t){return e(t)}function Le(e,t,n){return e(t,n)}function Me(){}var Ue=!1;function Ie(e,t){if(Ue)return e(t);Ue=!0;try{return 
je(e,t)}finally{Ue=!1,(null!==Pe||null!==Ce)&&(Me(),Ne())}}var Fe={color:!0,date:!0,datetime:!0,"datetime-local":!0,email:!0,month:!0,number:!0,password:!0,range:!0,search:!0,tel:!0,text:!0,time:!0,url:!0,week:!0};function Ae(e){var t=e&&e.nodeName&&e.nodeName.toLowerCase();return"input"===t?!!Fe[e.type]:"textarea"===t}function De(e){return(e=e.target||e.srcElement||window).correspondingUseElement&&(e=e.correspondingUseElement),3===e.nodeType?e.parentNode:e}function ze(e){if(!$)return!1;var t=(e="on"+e)in document;return t||((t=document.createElement("div")).setAttribute(e,"return;"),t="function"==typeof t[e]),t}function We(e){var t=e.type;return(e=e.nodeName)&&"input"===e.toLowerCase()&&("checkbox"===t||"radio"===t)}function Be(e){e._valueTracker||(e._valueTracker=function(e){var t=We(e)?"checked":"value",n=Object.getOwnPropertyDescriptor(e.constructor.prototype,t),r=""+e[t];if(!e.hasOwnProperty(t)&&void 0!==n&&"function"==typeof n.get&&"function"==typeof n.set){var o=n.get,i=n.set;return Object.defineProperty(e,t,{configurable:!0,get:function(){return o.call(this)},set:function(e){r=""+e,i.call(this,e)}}),Object.defineProperty(e,t,{enumerable:n.enumerable}),{getValue:function(){return r},setValue:function(e){r=""+e},stopTracking:function(){e._valueTracker=null,delete e[t]}}}}(e))}function He(e){if(!e)return!1;var t=e._valueTracker;if(!t)return!0;var n=t.getValue(),r="";return e&&(r=We(e)?e.checked?"true":"false":e.value),(e=r)!==n&&(t.setValue(e),!0)}var Ve=r.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED;Ve.hasOwnProperty("ReactCurrentDispatcher")||(Ve.ReactCurrentDispatcher={current:null});var $e=/^(.*)[\\\/]/,qe="function"==typeof 
Symbol&&Symbol.for,Ke=qe?Symbol.for("react.element"):60103,Qe=qe?Symbol.for("react.portal"):60106,Ge=qe?Symbol.for("react.fragment"):60107,Ye=qe?Symbol.for("react.strict_mode"):60108,Xe=qe?Symbol.for("react.profiler"):60114,Je=qe?Symbol.for("react.provider"):60109,Ze=qe?Symbol.for("react.context"):60110,et=qe?Symbol.for("react.concurrent_mode"):60111,tt=qe?Symbol.for("react.forward_ref"):60112,nt=qe?Symbol.for("react.suspense"):60113,rt=qe?Symbol.for("react.memo"):60115,ot=qe?Symbol.for("react.lazy"):60116,it="function"==typeof Symbol&&Symbol.iterator;function at(e){return null===e||"object"!=typeof e?null:"function"==typeof(e=it&&e[it]||e["@@iterator"])?e:null}function lt(e){if(null==e)return null;if("function"==typeof e)return e.displayName||e.name||null;if("string"==typeof e)return e;switch(e){case et:return"ConcurrentMode";case Ge:return"Fragment";case Qe:return"Portal";case Xe:return"Profiler";case Ye:return"StrictMode";case nt:return"Suspense"}if("object"==typeof e)switch(e.$$typeof){case Ze:return"Context.Consumer";case Je:return"Context.Provider";case tt:var t=e.render;return t=t.displayName||t.name||"",e.displayName||(""!==t?"ForwardRef("+t+")":"ForwardRef");case rt:return lt(e.type);case ot:if(e=1===e._status?e._result:null)return lt(e)}return null}function ut(e){var t="";do{e:switch(e.tag){case 3:case 4:case 6:case 7:case 10:case 9:var n="";break e;default:var r=e._debugOwner,o=e._debugSource,i=lt(e.type);n=null,r&&(n=lt(r.type)),r=i,i="",o?i=" (at "+o.fileName.replace($e,"")+":"+o.lineNumber+")":n&&(i=" (created by "+n+")"),n="\n in "+(r||"Unknown")+i}t+=n,e=e.return}while(e);return t}var 
ct=/^[:A-Z_a-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD][:A-Z_a-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\-.0-9\u00B7\u0300-\u036F\u203F-\u2040]*$/,st=Object.prototype.hasOwnProperty,ft={},dt={};function pt(e,t,n,r,o){this.acceptsBooleans=2===t||3===t||4===t,this.attributeName=r,this.attributeNamespace=o,this.mustUseProperty=n,this.propertyName=e,this.type=t}var ht={};"children dangerouslySetInnerHTML defaultValue defaultChecked innerHTML suppressContentEditableWarning suppressHydrationWarning style".split(" ").forEach(function(e){ht[e]=new pt(e,0,!1,e,null)}),[["acceptCharset","accept-charset"],["className","class"],["htmlFor","for"],["httpEquiv","http-equiv"]].forEach(function(e){var t=e[0];ht[t]=new pt(t,1,!1,e[1],null)}),["contentEditable","draggable","spellCheck","value"].forEach(function(e){ht[e]=new pt(e,2,!1,e.toLowerCase(),null)}),["autoReverse","externalResourcesRequired","focusable","preserveAlpha"].forEach(function(e){ht[e]=new pt(e,2,!1,e,null)}),"allowFullScreen async autoFocus autoPlay controls default defer disabled formNoValidate hidden loop noModule noValidate open playsInline readOnly required reversed scoped seamless itemScope".split(" ").forEach(function(e){ht[e]=new pt(e,3,!1,e.toLowerCase(),null)}),["checked","multiple","muted","selected"].forEach(function(e){ht[e]=new pt(e,3,!0,e,null)}),["capture","download"].forEach(function(e){ht[e]=new pt(e,4,!1,e,null)}),["cols","rows","size","span"].forEach(function(e){ht[e]=new pt(e,6,!1,e,null)}),["rowSpan","start"].forEach(function(e){ht[e]=new pt(e,5,!1,e.toLowerCase(),null)});var mt=/[\-:]([a-z])/g;function vt(e){return e[1].toUpperCase()}function yt(e,t,n,r){var 
o=ht.hasOwnProperty(t)?ht[t]:null;(null!==o?0===o.type:!r&&(2<t.length&&("o"===t[0]||"O"===t[0])&&("n"===t[1]||"N"===t[1])))||(function(e,t,n,r){if(null==t||function(e,t,n,r){if(null!==n&&0===n.type)return!1;switch(typeof t){case"function":case"symbol":return!0;case"boolean":return!r&&(null!==n?!n.acceptsBooleans:"data-"!==(e=e.toLowerCase().slice(0,5))&&"aria-"!==e);default:return!1}}(e,t,n,r))return!0;if(r)return!1;if(null!==n)switch(n.type){case 3:return!t;case 4:return!1===t;case 5:return isNaN(t);case 6:return isNaN(t)||1>t}return!1}(t,n,o,r)&&(n=null),r||null===o?function(e){return!!st.call(dt,e)||!st.call(ft,e)&&(ct.test(e)?dt[e]=!0:(ft[e]=!0,!1))}(t)&&(null===n?e.removeAttribute(t):e.setAttribute(t,""+n)):o.mustUseProperty?e[o.propertyName]=null===n?3!==o.type&&"":n:(t=o.attributeName,r=o.attributeNamespace,null===n?e.removeAttribute(t):(n=3===(o=o.type)||4===o&&!0===n?"":""+n,r?e.setAttributeNS(r,t,n):e.setAttribute(t,n))))}function gt(e){switch(typeof e){case"boolean":case"number":case"object":case"string":case"undefined":return e;default:return""}}function bt(e,t){var n=t.checked;return o({},t,{defaultChecked:void 0,defaultValue:void 0,value:void 0,checked:null!=n?n:e._wrapperState.initialChecked})}function wt(e,t){var n=null==t.defaultValue?"":t.defaultValue,r=null!=t.checked?t.checked:t.defaultChecked;n=gt(null!=t.value?t.value:n),e._wrapperState={initialChecked:r,initialValue:n,controlled:"checkbox"===t.type||"radio"===t.type?null!=t.checked:null!=t.value}}function _t(e,t){null!=(t=t.checked)&&yt(e,"checked",t,!1)}function xt(e,t){_t(e,t);var n=gt(t.value),r=t.type;if(null!=n)"number"===r?(0===n&&""===e.value||e.value!=n)&&(e.value=""+n):e.value!==""+n&&(e.value=""+n);else if("submit"===r||"reset"===r)return void e.removeAttribute("value");t.hasOwnProperty("value")?St(e,t.type,n):t.hasOwnProperty("defaultValue")&&St(e,t.type,gt(t.defaultValue)),null==t.checked&&null!=t.defaultChecked&&(e.defaultChecked=!!t.defaultChecked)}function 
kt(e,t,n){if(t.hasOwnProperty("value")||t.hasOwnProperty("defaultValue")){var r=t.type;if(!("submit"!==r&&"reset"!==r||void 0!==t.value&&null!==t.value))return;t=""+e._wrapperState.initialValue,n||t===e.value||(e.value=t),e.defaultValue=t}""!==(n=e.name)&&(e.name=""),e.defaultChecked=!e.defaultChecked,e.defaultChecked=!!e._wrapperState.initialChecked,""!==n&&(e.name=n)}function St(e,t,n){"number"===t&&e.ownerDocument.activeElement===e||(null==n?e.defaultValue=""+e._wrapperState.initialValue:e.defaultValue!==""+n&&(e.defaultValue=""+n))}"accent-height alignment-baseline arabic-form baseline-shift cap-height clip-path clip-rule color-interpolation color-interpolation-filters color-profile color-rendering dominant-baseline enable-background fill-opacity fill-rule flood-color flood-opacity font-family font-size font-size-adjust font-stretch font-style font-variant font-weight glyph-name glyph-orientation-horizontal glyph-orientation-vertical horiz-adv-x horiz-origin-x image-rendering letter-spacing lighting-color marker-end marker-mid marker-start overline-position overline-thickness paint-order panose-1 pointer-events rendering-intent shape-rendering stop-color stop-opacity strikethrough-position strikethrough-thickness stroke-dasharray stroke-dashoffset stroke-linecap stroke-linejoin stroke-miterlimit stroke-opacity stroke-width text-anchor text-decoration text-rendering underline-position underline-thickness unicode-bidi unicode-range units-per-em v-alphabetic v-hanging v-ideographic v-mathematical vector-effect vert-adv-y vert-origin-x vert-origin-y word-spacing writing-mode xmlns:xlink x-height".split(" ").forEach(function(e){var t=e.replace(mt,vt);ht[t]=new pt(t,1,!1,e,null)}),"xlink:actuate xlink:arcrole xlink:href xlink:role xlink:show xlink:title xlink:type".split(" ").forEach(function(e){var t=e.replace(mt,vt);ht[t]=new pt(t,1,!1,e,"http://www.w3.org/1999/xlink")}),["xml:base","xml:lang","xml:space"].forEach(function(e){var t=e.replace(mt,vt);ht[t]=new 
pt(t,1,!1,e,"http://www.w3.org/XML/1998/namespace")}),["tabIndex","crossOrigin"].forEach(function(e){ht[e]=new pt(e,1,!1,e.toLowerCase(),null)});var Tt={change:{phasedRegistrationNames:{bubbled:"onChange",captured:"onChangeCapture"},dependencies:"blur change click focus input keydown keyup selectionchange".split(" ")}};function Et(e,t,n){return(e=ue.getPooled(Tt.change,e,t,n)).type="change",Oe(n),V(e),e}var Pt=null,Ct=null;function Rt(e){N(e)}function Ot(e){if(He(F(e)))return e}function Nt(e,t){if("change"===e)return t}var jt=!1;function Lt(){Pt&&(Pt.detachEvent("onpropertychange",Mt),Ct=Pt=null)}function Mt(e){"value"===e.propertyName&&Ot(Ct)&&Ie(Rt,e=Et(Ct,e,De(e)))}function Ut(e,t,n){"focus"===e?(Lt(),Ct=n,(Pt=t).attachEvent("onpropertychange",Mt)):"blur"===e&&Lt()}function It(e){if("selectionchange"===e||"keyup"===e||"keydown"===e)return Ot(Ct)}function Ft(e,t){if("click"===e)return Ot(t)}function At(e,t){if("input"===e||"change"===e)return Ot(t)}$&&(jt=ze("input")&&(!document.documentMode||9<document.documentMode));var Dt={eventTypes:Tt,_isInputEventSupported:jt,extractEvents:function(e,t,n,r){var o=t?F(t):window,i=void 0,a=void 0,l=o.nodeName&&o.nodeName.toLowerCase();if("select"===l||"input"===l&&"file"===o.type?i=Nt:Ae(o)?jt?i=At:(i=It,a=Ut):(l=o.nodeName)&&"input"===l.toLowerCase()&&("checkbox"===o.type||"radio"===o.type)&&(i=Ft),i&&(i=i(e,t)))return Et(i,n,r);a&&a(e,o,t),"blur"===e&&(e=o._wrapperState)&&e.controlled&&"number"===o.type&&St(o,"number",o.value)}},zt=ue.extend({view:null,detail:null}),Wt={Alt:"altKey",Control:"ctrlKey",Meta:"metaKey",Shift:"shiftKey"};function Bt(e){var t=this.nativeEvent;return t.getModifierState?t.getModifierState(e):!!(e=Wt[e])&&!!t[e]}function Ht(){return Bt}var Vt=0,$t=0,qt=!1,Kt=!1,Qt=zt.extend({screenX:null,screenY:null,clientX:null,clientY:null,pageX:null,pageY:null,ctrlKey:null,shiftKey:null,altKey:null,metaKey:null,getModifierState:Ht,button:null,buttons:null,relatedTarget:function(e){return 
e.relatedTarget||(e.fromElement===e.srcElement?e.toElement:e.fromElement)},movementX:function(e){if("movementX"in e)return e.movementX;var t=Vt;return Vt=e.screenX,qt?"mousemove"===e.type?e.screenX-t:0:(qt=!0,0)},movementY:function(e){if("movementY"in e)return e.movementY;var t=$t;return $t=e.screenY,Kt?"mousemove"===e.type?e.screenY-t:0:(Kt=!0,0)}}),Gt=Qt.extend({pointerId:null,width:null,height:null,pressure:null,tangentialPressure:null,tiltX:null,tiltY:null,twist:null,pointerType:null,isPrimary:null}),Yt={mouseEnter:{registrationName:"onMouseEnter",dependencies:["mouseout","mouseover"]},mouseLeave:{registrationName:"onMouseLeave",dependencies:["mouseout","mouseover"]},pointerEnter:{registrationName:"onPointerEnter",dependencies:["pointerout","pointerover"]},pointerLeave:{registrationName:"onPointerLeave",dependencies:["pointerout","pointerover"]}},Xt={eventTypes:Yt,extractEvents:function(e,t,n,r){var o="mouseover"===e||"pointerover"===e,i="mouseout"===e||"pointerout"===e;if(o&&(n.relatedTarget||n.fromElement)||!i&&!o)return null;if(o=r.window===r?r:(o=r.ownerDocument)?o.defaultView||o.parentWindow:window,i?(i=t,t=(t=n.relatedTarget||n.toElement)?U(t):null):i=null,i===t)return null;var a=void 0,l=void 0,u=void 0,c=void 0;"mouseout"===e||"mouseover"===e?(a=Qt,l=Yt.mouseLeave,u=Yt.mouseEnter,c="mouse"):"pointerout"!==e&&"pointerover"!==e||(a=Gt,l=Yt.pointerLeave,u=Yt.pointerEnter,c="pointer");var s=null==i?o:F(i);if(o=null==t?o:F(t),(e=a.getPooled(l,i,n,r)).type=c+"leave",e.target=s,e.relatedTarget=o,(n=a.getPooled(u,t,n,r)).type=c+"enter",n.target=o,n.relatedTarget=s,r=t,i&&r)e:{for(o=r,c=0,a=t=i;a;a=D(a))c++;for(a=0,u=o;u;u=D(u))a++;for(;0<c-a;)t=D(t),c--;for(;0<a-c;)o=D(o),a--;for(;c--;){if(t===o||t===o.alternate)break e;t=D(t),o=D(o)}t=null}else 
t=null;for(o=t,t=[];i&&i!==o&&(null===(c=i.alternate)||c!==o);)t.push(i),i=D(i);for(i=[];r&&r!==o&&(null===(c=r.alternate)||c!==o);)i.push(r),r=D(r);for(r=0;r<t.length;r++)B(t[r],"bubbled",e);for(r=i.length;0<r--;)B(i[r],"captured",n);return[e,n]}};function Jt(e,t){return e===t&&(0!==e||1/e==1/t)||e!=e&&t!=t}var Zt=Object.prototype.hasOwnProperty;function en(e,t){if(Jt(e,t))return!0;if("object"!=typeof e||null===e||"object"!=typeof t||null===t)return!1;var n=Object.keys(e),r=Object.keys(t);if(n.length!==r.length)return!1;for(r=0;r<n.length;r++)if(!Zt.call(t,n[r])||!Jt(e[n[r]],t[n[r]]))return!1;return!0}function tn(e){var t=e;if(e.alternate)for(;t.return;)t=t.return;else{if(0!=(2&t.effectTag))return 1;for(;t.return;)if(0!=(2&(t=t.return).effectTag))return 1}return 3===t.tag?2:3}function nn(e){2!==tn(e)&&a("188")}function rn(e){if(!(e=function(e){var t=e.alternate;if(!t)return 3===(t=tn(e))&&a("188"),1===t?null:e;for(var n=e,r=t;;){var o=n.return,i=o?o.alternate:null;if(!o||!i)break;if(o.child===i.child){for(var l=o.child;l;){if(l===n)return nn(o),e;if(l===r)return nn(o),t;l=l.sibling}a("188")}if(n.return!==r.return)n=o,r=i;else{l=!1;for(var u=o.child;u;){if(u===n){l=!0,n=o,r=i;break}if(u===r){l=!0,r=o,n=i;break}u=u.sibling}if(!l){for(u=i.child;u;){if(u===n){l=!0,n=i,r=o;break}if(u===r){l=!0,r=i,n=o;break}u=u.sibling}l||a("189")}}n.alternate!==r&&a("190")}return 3!==n.tag&&a("188"),n.stateNode.current===n?e:t}(e)))return null;for(var t=e;;){if(5===t.tag||6===t.tag)return t;if(t.child)t.child.return=t,t=t.child;else{if(t===e)break;for(;!t.sibling;){if(!t.return||t.return===e)return null;t=t.return}t.sibling.return=t.return,t=t.sibling}}return null}var on=ue.extend({animationName:null,elapsedTime:null,pseudoElement:null}),an=ue.extend({clipboardData:function(e){return"clipboardData"in e?e.clipboardData:window.clipboardData}}),ln=zt.extend({relatedTarget:null});function un(e){var t=e.keyCode;return"charCode"in 
e?0===(e=e.charCode)&&13===t&&(e=13):e=t,10===e&&(e=13),32<=e||13===e?e:0}var cn={Esc:"Escape",Spacebar:" ",Left:"ArrowLeft",Up:"ArrowUp",Right:"ArrowRight",Down:"ArrowDown",Del:"Delete",Win:"OS",Menu:"ContextMenu",Apps:"ContextMenu",Scroll:"ScrollLock",MozPrintableKey:"Unidentified"},sn={8:"Backspace",9:"Tab",12:"Clear",13:"Enter",16:"Shift",17:"Control",18:"Alt",19:"Pause",20:"CapsLock",27:"Escape",32:" ",33:"PageUp",34:"PageDown",35:"End",36:"Home",37:"ArrowLeft",38:"ArrowUp",39:"ArrowRight",40:"ArrowDown",45:"Insert",46:"Delete",112:"F1",113:"F2",114:"F3",115:"F4",116:"F5",117:"F6",118:"F7",119:"F8",120:"F9",121:"F10",122:"F11",123:"F12",144:"NumLock",145:"ScrollLock",224:"Meta"},fn=zt.extend({key:function(e){if(e.key){var t=cn[e.key]||e.key;if("Unidentified"!==t)return t}return"keypress"===e.type?13===(e=un(e))?"Enter":String.fromCharCode(e):"keydown"===e.type||"keyup"===e.type?sn[e.keyCode]||"Unidentified":""},location:null,ctrlKey:null,shiftKey:null,altKey:null,metaKey:null,repeat:null,locale:null,getModifierState:Ht,charCode:function(e){return"keypress"===e.type?un(e):0},keyCode:function(e){return"keydown"===e.type||"keyup"===e.type?e.keyCode:0},which:function(e){return"keypress"===e.type?un(e):"keydown"===e.type||"keyup"===e.type?e.keyCode:0}}),dn=Qt.extend({dataTransfer:null}),pn=zt.extend({touches:null,targetTouches:null,changedTouches:null,altKey:null,metaKey:null,ctrlKey:null,shiftKey:null,getModifierState:Ht}),hn=ue.extend({propertyName:null,elapsedTime:null,pseudoElement:null}),mn=Qt.extend({deltaX:function(e){return"deltaX"in e?e.deltaX:"wheelDeltaX"in e?-e.wheelDeltaX:0},deltaY:function(e){return"deltaY"in e?e.deltaY:"wheelDeltaY"in e?-e.wheelDeltaY:"wheelDelta"in 
e?-e.wheelDelta:0},deltaZ:null,deltaMode:null}),vn=[["abort","abort"],[X,"animationEnd"],[J,"animationIteration"],[Z,"animationStart"],["canplay","canPlay"],["canplaythrough","canPlayThrough"],["drag","drag"],["dragenter","dragEnter"],["dragexit","dragExit"],["dragleave","dragLeave"],["dragover","dragOver"],["durationchange","durationChange"],["emptied","emptied"],["encrypted","encrypted"],["ended","ended"],["error","error"],["gotpointercapture","gotPointerCapture"],["load","load"],["loadeddata","loadedData"],["loadedmetadata","loadedMetadata"],["loadstart","loadStart"],["lostpointercapture","lostPointerCapture"],["mousemove","mouseMove"],["mouseout","mouseOut"],["mouseover","mouseOver"],["playing","playing"],["pointermove","pointerMove"],["pointerout","pointerOut"],["pointerover","pointerOver"],["progress","progress"],["scroll","scroll"],["seeking","seeking"],["stalled","stalled"],["suspend","suspend"],["timeupdate","timeUpdate"],["toggle","toggle"],["touchmove","touchMove"],[ee,"transitionEnd"],["waiting","waiting"],["wheel","wheel"]],yn={},gn={};function bn(e,t){var 
n=e[0],r="on"+((e=e[1])[0].toUpperCase()+e.slice(1));t={phasedRegistrationNames:{bubbled:r,captured:r+"Capture"},dependencies:[n],isInteractive:t},yn[e]=t,gn[n]=t}[["blur","blur"],["cancel","cancel"],["click","click"],["close","close"],["contextmenu","contextMenu"],["copy","copy"],["cut","cut"],["auxclick","auxClick"],["dblclick","doubleClick"],["dragend","dragEnd"],["dragstart","dragStart"],["drop","drop"],["focus","focus"],["input","input"],["invalid","invalid"],["keydown","keyDown"],["keypress","keyPress"],["keyup","keyUp"],["mousedown","mouseDown"],["mouseup","mouseUp"],["paste","paste"],["pause","pause"],["play","play"],["pointercancel","pointerCancel"],["pointerdown","pointerDown"],["pointerup","pointerUp"],["ratechange","rateChange"],["reset","reset"],["seeked","seeked"],["submit","submit"],["touchcancel","touchCancel"],["touchend","touchEnd"],["touchstart","touchStart"],["volumechange","volumeChange"]].forEach(function(e){bn(e,!0)}),vn.forEach(function(e){bn(e,!1)});var wn={eventTypes:yn,isInteractiveTopLevelEventType:function(e){return void 0!==(e=gn[e])&&!0===e.isInteractive},extractEvents:function(e,t,n,r){var o=gn[e];if(!o)return null;switch(e){case"keypress":if(0===un(n))return null;case"keydown":case"keyup":e=fn;break;case"blur":case"focus":e=ln;break;case"click":if(2===n.button)return null;case"auxclick":case"dblclick":case"mousedown":case"mousemove":case"mouseup":case"mouseout":case"mouseover":case"contextmenu":e=Qt;break;case"drag":case"dragend":case"dragenter":case"dragexit":case"dragleave":case"dragover":case"dragstart":case"drop":e=dn;break;case"touchcancel":case"touchend":case"touchmove":case"touchstart":e=pn;break;case X:case J:case Z:e=on;break;case ee:e=hn;break;case"scroll":e=zt;break;case"wheel":e=mn;break;case"copy":case"cut":case"paste":e=an;break;case"gotpointercapture":case"lostpointercapture":case"pointercancel":case"pointerdown":case"pointermove":case"pointerout":case"pointerover":case"pointerup":e=Gt;break;default:e=ue}return 
V(t=e.getPooled(o,t,n,r)),t}},_n=wn.isInteractiveTopLevelEventType,xn=[];function kn(e){var t=e.targetInst,n=t;do{if(!n){e.ancestors.push(n);break}var r;for(r=n;r.return;)r=r.return;if(!(r=3!==r.tag?null:r.stateNode.containerInfo))break;e.ancestors.push(n),n=U(r)}while(n);for(n=0;n<e.ancestors.length;n++){t=e.ancestors[n];var o=De(e.nativeEvent);r=e.topLevelType;for(var i=e.nativeEvent,a=null,l=0;l<y.length;l++){var u=y[l];u&&(u=u.extractEvents(r,t,i,o))&&(a=T(a,u))}N(a)}}var Sn=!0;function Tn(e,t){if(!t)return null;var n=(_n(e)?Pn:Cn).bind(null,e);t.addEventListener(e,n,!1)}function En(e,t){if(!t)return null;var n=(_n(e)?Pn:Cn).bind(null,e);t.addEventListener(e,n,!0)}function Pn(e,t){Le(Cn,e,t)}function Cn(e,t){if(Sn){var n=De(t);if(null===(n=U(n))||"number"!=typeof n.tag||2===tn(n)||(n=null),xn.length){var r=xn.pop();r.topLevelType=e,r.nativeEvent=t,r.targetInst=n,e=r}else e={topLevelType:e,nativeEvent:t,targetInst:n,ancestors:[]};try{Ie(kn,e)}finally{e.topLevelType=null,e.nativeEvent=null,e.targetInst=null,e.ancestors.length=0,10>xn.length&&xn.push(e)}}}var Rn={},On=0,Nn="_reactListenersID"+(""+Math.random()).slice(2);function jn(e){return Object.prototype.hasOwnProperty.call(e,Nn)||(e[Nn]=On++,Rn[e[Nn]]={}),Rn[e[Nn]]}function Ln(e){if(void 0===(e=e||("undefined"!=typeof document?document:void 0)))return null;try{return e.activeElement||e.body}catch(t){return e.body}}function Mn(e){for(;e&&e.firstChild;)e=e.firstChild;return e}function Un(e,t){var n,r=Mn(e);for(e=0;r;){if(3===r.nodeType){if(n=e+r.textContent.length,e<=t&&n>=t)return{node:r,offset:t-e};e=n}e:{for(;r;){if(r.nextSibling){r=r.nextSibling;break e}r=r.parentNode}r=void 0}r=Mn(r)}}function In(){for(var e=window,t=Ln();t instanceof e.HTMLIFrameElement;){try{var n="string"==typeof t.contentWindow.location.href}catch(r){n=!1}if(!n)break;t=Ln((e=t.contentWindow).document)}return t}function Fn(e){var t=e&&e.nodeName&&e.nodeName.toLowerCase();return 
t&&("input"===t&&("text"===e.type||"search"===e.type||"tel"===e.type||"url"===e.type||"password"===e.type)||"textarea"===t||"true"===e.contentEditable)}function An(e){var t=In(),n=e.focusedElem,r=e.selectionRange;if(t!==n&&n&&n.ownerDocument&&function e(t,n){return!(!t||!n)&&(t===n||(!t||3!==t.nodeType)&&(n&&3===n.nodeType?e(t,n.parentNode):"contains"in t?t.contains(n):!!t.compareDocumentPosition&&!!(16&t.compareDocumentPosition(n))))}(n.ownerDocument.documentElement,n)){if(null!==r&&Fn(n))if(t=r.start,void 0===(e=r.end)&&(e=t),"selectionStart"in n)n.selectionStart=t,n.selectionEnd=Math.min(e,n.value.length);else if((e=(t=n.ownerDocument||document)&&t.defaultView||window).getSelection){e=e.getSelection();var o=n.textContent.length,i=Math.min(r.start,o);r=void 0===r.end?i:Math.min(r.end,o),!e.extend&&i>r&&(o=r,r=i,i=o),o=Un(n,i);var a=Un(n,r);o&&a&&(1!==e.rangeCount||e.anchorNode!==o.node||e.anchorOffset!==o.offset||e.focusNode!==a.node||e.focusOffset!==a.offset)&&((t=t.createRange()).setStart(o.node,o.offset),e.removeAllRanges(),i>r?(e.addRange(t),e.extend(a.node,a.offset)):(t.setEnd(a.node,a.offset),e.addRange(t)))}for(t=[],e=n;e=e.parentNode;)1===e.nodeType&&t.push({element:e,left:e.scrollLeft,top:e.scrollTop});for("function"==typeof n.focus&&n.focus(),n=0;n<t.length;n++)(e=t[n]).element.scrollLeft=e.left,e.element.scrollTop=e.top}}var Dn=$&&"documentMode"in document&&11>=document.documentMode,zn={select:{phasedRegistrationNames:{bubbled:"onSelect",captured:"onSelectCapture"},dependencies:"blur contextmenu dragend focus keydown keyup mousedown mouseup selectionchange".split(" ")}},Wn=null,Bn=null,Hn=null,Vn=!1;function $n(e,t){var n=t.window===t?t.document:9===t.nodeType?t:t.ownerDocument;return 
Vn||null==Wn||Wn!==Ln(n)?null:("selectionStart"in(n=Wn)&&Fn(n)?n={start:n.selectionStart,end:n.selectionEnd}:n={anchorNode:(n=(n.ownerDocument&&n.ownerDocument.defaultView||window).getSelection()).anchorNode,anchorOffset:n.anchorOffset,focusNode:n.focusNode,focusOffset:n.focusOffset},Hn&&en(Hn,n)?null:(Hn=n,(e=ue.getPooled(zn.select,Bn,e,t)).type="select",e.target=Wn,V(e),e))}var qn={eventTypes:zn,extractEvents:function(e,t,n,r){var o,i=r.window===r?r.document:9===r.nodeType?r:r.ownerDocument;if(!(o=!i)){e:{i=jn(i),o=w.onSelect;for(var a=0;a<o.length;a++){var l=o[a];if(!i.hasOwnProperty(l)||!i[l]){i=!1;break e}}i=!0}o=!i}if(o)return null;switch(i=t?F(t):window,e){case"focus":(Ae(i)||"true"===i.contentEditable)&&(Wn=i,Bn=t,Hn=null);break;case"blur":Hn=Bn=Wn=null;break;case"mousedown":Vn=!0;break;case"contextmenu":case"mouseup":case"dragend":return Vn=!1,$n(n,r);case"selectionchange":if(Dn)break;case"keydown":case"keyup":return $n(n,r)}return null}};function Kn(e,t){return e=o({children:void 0},t),(t=function(e){var t="";return r.Children.forEach(e,function(e){null!=e&&(t+=e)}),t}(t.children))&&(e.children=t),e}function Qn(e,t,n,r){if(e=e.options,t){t={};for(var o=0;o<n.length;o++)t["$"+n[o]]=!0;for(n=0;n<e.length;n++)o=t.hasOwnProperty("$"+e[n].value),e[n].selected!==o&&(e[n].selected=o),o&&r&&(e[n].defaultSelected=!0)}else{for(n=""+gt(n),t=null,o=0;o<e.length;o++){if(e[o].value===n)return e[o].selected=!0,void(r&&(e[o].defaultSelected=!0));null!==t||e[o].disabled||(t=e[o])}null!==t&&(t.selected=!0)}}function Gn(e,t){return null!=t.dangerouslySetInnerHTML&&a("91"),o({},t,{value:void 0,defaultValue:void 0,children:""+e._wrapperState.initialValue})}function Yn(e,t){var n=t.value;null==n&&(n=t.defaultValue,null!=(t=t.children)&&(null!=n&&a("92"),Array.isArray(t)&&(1>=t.length||a("93"),t=t[0]),n=t),null==n&&(n="")),e._wrapperState={initialValue:gt(n)}}function Xn(e,t){var 
n=gt(t.value),r=gt(t.defaultValue);null!=n&&((n=""+n)!==e.value&&(e.value=n),null==t.defaultValue&&e.defaultValue!==n&&(e.defaultValue=n)),null!=r&&(e.defaultValue=""+r)}function Jn(e){var t=e.textContent;t===e._wrapperState.initialValue&&(e.value=t)}R.injectEventPluginOrder("ResponderEventPlugin SimpleEventPlugin EnterLeaveEventPlugin ChangeEventPlugin SelectEventPlugin BeforeInputEventPlugin".split(" ")),_=A,x=I,k=F,R.injectEventPluginsByName({SimpleEventPlugin:wn,EnterLeaveEventPlugin:Xt,ChangeEventPlugin:Dt,SelectEventPlugin:qn,BeforeInputEventPlugin:Te});var Zn={html:"http://www.w3.org/1999/xhtml",mathml:"http://www.w3.org/1998/Math/MathML",svg:"http://www.w3.org/2000/svg"};function er(e){switch(e){case"svg":return"http://www.w3.org/2000/svg";case"math":return"http://www.w3.org/1998/Math/MathML";default:return"http://www.w3.org/1999/xhtml"}}function tr(e,t){return null==e||"http://www.w3.org/1999/xhtml"===e?er(t):"http://www.w3.org/2000/svg"===e&&"foreignObject"===t?"http://www.w3.org/1999/xhtml":e}var nr,rr=void 0,or=(nr=function(e,t){if(e.namespaceURI!==Zn.svg||"innerHTML"in e)e.innerHTML=t;else{for((rr=rr||document.createElement("div")).innerHTML="<svg>"+t+"</svg>",t=rr.firstChild;e.firstChild;)e.removeChild(e.firstChild);for(;t.firstChild;)e.appendChild(t.firstChild)}},"undefined"!=typeof MSApp&&MSApp.execUnsafeLocalFunction?function(e,t,n,r){MSApp.execUnsafeLocalFunction(function(){return nr(e,t)})}:nr);function ir(e,t){if(t){var n=e.firstChild;if(n&&n===e.lastChild&&3===n.nodeType)return void(n.nodeValue=t)}e.textContent=t}var 
ar={animationIterationCount:!0,borderImageOutset:!0,borderImageSlice:!0,borderImageWidth:!0,boxFlex:!0,boxFlexGroup:!0,boxOrdinalGroup:!0,columnCount:!0,columns:!0,flex:!0,flexGrow:!0,flexPositive:!0,flexShrink:!0,flexNegative:!0,flexOrder:!0,gridArea:!0,gridRow:!0,gridRowEnd:!0,gridRowSpan:!0,gridRowStart:!0,gridColumn:!0,gridColumnEnd:!0,gridColumnSpan:!0,gridColumnStart:!0,fontWeight:!0,lineClamp:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,tabSize:!0,widows:!0,zIndex:!0,zoom:!0,fillOpacity:!0,floodOpacity:!0,stopOpacity:!0,strokeDasharray:!0,strokeDashoffset:!0,strokeMiterlimit:!0,strokeOpacity:!0,strokeWidth:!0},lr=["Webkit","ms","Moz","O"];function ur(e,t,n){return null==t||"boolean"==typeof t||""===t?"":n||"number"!=typeof t||0===t||ar.hasOwnProperty(e)&&ar[e]?(""+t).trim():t+"px"}function cr(e,t){for(var n in e=e.style,t)if(t.hasOwnProperty(n)){var r=0===n.indexOf("--"),o=ur(n,t[n],r);"float"===n&&(n="cssFloat"),r?e.setProperty(n,o):e[n]=o}}Object.keys(ar).forEach(function(e){lr.forEach(function(t){t=t+e.charAt(0).toUpperCase()+e.substring(1),ar[t]=ar[e]})});var sr=o({menuitem:!0},{area:!0,base:!0,br:!0,col:!0,embed:!0,hr:!0,img:!0,input:!0,keygen:!0,link:!0,meta:!0,param:!0,source:!0,track:!0,wbr:!0});function fr(e,t){t&&(sr[e]&&(null!=t.children||null!=t.dangerouslySetInnerHTML)&&a("137",e,""),null!=t.dangerouslySetInnerHTML&&(null!=t.children&&a("60"),"object"==typeof t.dangerouslySetInnerHTML&&"__html"in t.dangerouslySetInnerHTML||a("61")),null!=t.style&&"object"!=typeof t.style&&a("62",""))}function dr(e,t){if(-1===e.indexOf("-"))return"string"==typeof t.is;switch(e){case"annotation-xml":case"color-profile":case"font-face":case"font-face-src":case"font-face-uri":case"font-face-format":case"font-face-name":case"missing-glyph":return!1;default:return!0}}function pr(e,t){var n=jn(e=9===e.nodeType||11===e.nodeType?e:e.ownerDocument);t=w[t];for(var r=0;r<t.length;r++){var 
o=t[r];if(!n.hasOwnProperty(o)||!n[o]){switch(o){case"scroll":En("scroll",e);break;case"focus":case"blur":En("focus",e),En("blur",e),n.blur=!0,n.focus=!0;break;case"cancel":case"close":ze(o)&&En(o,e);break;case"invalid":case"submit":case"reset":break;default:-1===te.indexOf(o)&&Tn(o,e)}n[o]=!0}}}function hr(){}var mr=null,vr=null;function yr(e,t){switch(e){case"button":case"input":case"select":case"textarea":return!!t.autoFocus}return!1}function gr(e,t){return"textarea"===e||"option"===e||"noscript"===e||"string"==typeof t.children||"number"==typeof t.children||"object"==typeof t.dangerouslySetInnerHTML&&null!==t.dangerouslySetInnerHTML&&null!=t.dangerouslySetInnerHTML.__html}var br="function"==typeof setTimeout?setTimeout:void 0,wr="function"==typeof clearTimeout?clearTimeout:void 0,_r=i.unstable_scheduleCallback,xr=i.unstable_cancelCallback;function kr(e){for(e=e.nextSibling;e&&1!==e.nodeType&&3!==e.nodeType;)e=e.nextSibling;return e}function Sr(e){for(e=e.firstChild;e&&1!==e.nodeType&&3!==e.nodeType;)e=e.nextSibling;return e}new Set;var Tr=[],Er=-1;function Pr(e){0>Er||(e.current=Tr[Er],Tr[Er]=null,Er--)}function Cr(e,t){Tr[++Er]=e.current,e.current=t}var Rr={},Or={current:Rr},Nr={current:!1},jr=Rr;function Lr(e,t){var n=e.type.contextTypes;if(!n)return Rr;var r=e.stateNode;if(r&&r.__reactInternalMemoizedUnmaskedChildContext===t)return r.__reactInternalMemoizedMaskedChildContext;var o,i={};for(o in n)i[o]=t[o];return r&&((e=e.stateNode).__reactInternalMemoizedUnmaskedChildContext=t,e.__reactInternalMemoizedMaskedChildContext=i),i}function Mr(e){return null!=(e=e.childContextTypes)}function Ur(e){Pr(Nr),Pr(Or)}function Ir(e){Pr(Nr),Pr(Or)}function Fr(e,t,n){Or.current!==Rr&&a("168"),Cr(Or,t),Cr(Nr,n)}function Ar(e,t,n){var r=e.stateNode;if(e=t.childContextTypes,"function"!=typeof r.getChildContext)return n;for(var i in r=r.getChildContext())i in e||a("108",lt(t)||"Unknown",i);return o({},n,r)}function Dr(e){var t=e.stateNode;return 
t=t&&t.__reactInternalMemoizedMergedChildContext||Rr,jr=Or.current,Cr(Or,t),Cr(Nr,Nr.current),!0}function zr(e,t,n){var r=e.stateNode;r||a("169"),n?(t=Ar(e,t,jr),r.__reactInternalMemoizedMergedChildContext=t,Pr(Nr),Pr(Or),Cr(Or,t)):Pr(Nr),Cr(Nr,n)}var Wr=null,Br=null;function Hr(e){return function(t){try{return e(t)}catch(n){}}}function Vr(e,t,n,r){this.tag=e,this.key=n,this.sibling=this.child=this.return=this.stateNode=this.type=this.elementType=null,this.index=0,this.ref=null,this.pendingProps=t,this.contextDependencies=this.memoizedState=this.updateQueue=this.memoizedProps=null,this.mode=r,this.effectTag=0,this.lastEffect=this.firstEffect=this.nextEffect=null,this.childExpirationTime=this.expirationTime=0,this.alternate=null}function $r(e,t,n,r){return new Vr(e,t,n,r)}function qr(e){return!(!(e=e.prototype)||!e.isReactComponent)}function Kr(e,t){var n=e.alternate;return null===n?((n=$r(e.tag,t,e.key,e.mode)).elementType=e.elementType,n.type=e.type,n.stateNode=e.stateNode,n.alternate=e,e.alternate=n):(n.pendingProps=t,n.effectTag=0,n.nextEffect=null,n.firstEffect=null,n.lastEffect=null),n.childExpirationTime=e.childExpirationTime,n.expirationTime=e.expirationTime,n.child=e.child,n.memoizedProps=e.memoizedProps,n.memoizedState=e.memoizedState,n.updateQueue=e.updateQueue,n.contextDependencies=e.contextDependencies,n.sibling=e.sibling,n.index=e.index,n.ref=e.ref,n}function Qr(e,t,n,r,o,i){var l=2;if(r=e,"function"==typeof e)qr(e)&&(l=1);else if("string"==typeof e)l=5;else e:switch(e){case Ge:return Gr(n.children,o,i,t);case et:return Yr(n,3|o,i,t);case Ye:return Yr(n,2|o,i,t);case Xe:return(e=$r(12,n,t,4|o)).elementType=Xe,e.type=Xe,e.expirationTime=i,e;case nt:return(e=$r(13,n,t,o)).elementType=nt,e.type=nt,e.expirationTime=i,e;default:if("object"==typeof e&&null!==e)switch(e.$$typeof){case Je:l=10;break e;case Ze:l=9;break e;case tt:l=11;break e;case rt:l=14;break e;case ot:l=16,r=null;break e}a("130",null==e?e:typeof 
e,"")}return(t=$r(l,n,t,o)).elementType=e,t.type=r,t.expirationTime=i,t}function Gr(e,t,n,r){return(e=$r(7,e,r,t)).expirationTime=n,e}function Yr(e,t,n,r){return e=$r(8,e,r,t),t=0==(1&t)?Ye:et,e.elementType=t,e.type=t,e.expirationTime=n,e}function Xr(e,t,n){return(e=$r(6,e,null,t)).expirationTime=n,e}function Jr(e,t,n){return(t=$r(4,null!==e.children?e.children:[],e.key,t)).expirationTime=n,t.stateNode={containerInfo:e.containerInfo,pendingChildren:null,implementation:e.implementation},t}function Zr(e,t){e.didError=!1;var n=e.earliestPendingTime;0===n?e.earliestPendingTime=e.latestPendingTime=t:n<t?e.earliestPendingTime=t:e.latestPendingTime>t&&(e.latestPendingTime=t),no(t,e)}function eo(e,t){e.didError=!1,e.latestPingedTime>=t&&(e.latestPingedTime=0);var n=e.earliestPendingTime,r=e.latestPendingTime;n===t?e.earliestPendingTime=r===t?e.latestPendingTime=0:r:r===t&&(e.latestPendingTime=n),n=e.earliestSuspendedTime,r=e.latestSuspendedTime,0===n?e.earliestSuspendedTime=e.latestSuspendedTime=t:n<t?e.earliestSuspendedTime=t:r>t&&(e.latestSuspendedTime=t),no(t,e)}function to(e,t){var n=e.earliestPendingTime;return n>t&&(t=n),(e=e.earliestSuspendedTime)>t&&(t=e),t}function no(e,t){var n=t.earliestSuspendedTime,r=t.latestSuspendedTime,o=t.earliestPendingTime,i=t.latestPingedTime;0===(o=0!==o?o:i)&&(0===e||r<e)&&(o=r),0!==(e=o)&&n>e&&(e=n),t.nextExpirationTimeToWorkOn=o,t.expirationTime=e}function ro(e,t){if(e&&e.defaultProps)for(var n in t=o({},t),e=e.defaultProps)void 0===t[n]&&(t[n]=e[n]);return t}var oo=(new r.Component).refs;function io(e,t,n,r){n=null==(n=n(r,t=e.memoizedState))?t:o({},t,n),e.memoizedState=n,null!==(r=e.updateQueue)&&0===e.expirationTime&&(r.baseState=n)}var ao={isMounted:function(e){return!!(e=e._reactInternalFiber)&&2===tn(e)},enqueueSetState:function(e,t,n){e=e._reactInternalFiber;var r=xl(),o=Yi(r=Ga(r,e));o.payload=t,null!=n&&(o.callback=n),Ha(),Ji(e,o),Ja(e,r)},enqueueReplaceState:function(e,t,n){e=e._reactInternalFiber;var 
r=xl(),o=Yi(r=Ga(r,e));o.tag=Vi,o.payload=t,null!=n&&(o.callback=n),Ha(),Ji(e,o),Ja(e,r)},enqueueForceUpdate:function(e,t){e=e._reactInternalFiber;var n=xl(),r=Yi(n=Ga(n,e));r.tag=$i,null!=t&&(r.callback=t),Ha(),Ji(e,r),Ja(e,n)}};function lo(e,t,n,r,o,i,a){return"function"==typeof(e=e.stateNode).shouldComponentUpdate?e.shouldComponentUpdate(r,i,a):!t.prototype||!t.prototype.isPureReactComponent||(!en(n,r)||!en(o,i))}function uo(e,t,n){var r=!1,o=Rr,i=t.contextType;return"object"==typeof i&&null!==i?i=Bi(i):(o=Mr(t)?jr:Or.current,i=(r=null!=(r=t.contextTypes))?Lr(e,o):Rr),t=new t(n,i),e.memoizedState=null!==t.state&&void 0!==t.state?t.state:null,t.updater=ao,e.stateNode=t,t._reactInternalFiber=e,r&&((e=e.stateNode).__reactInternalMemoizedUnmaskedChildContext=o,e.__reactInternalMemoizedMaskedChildContext=i),t}function co(e,t,n,r){e=t.state,"function"==typeof t.componentWillReceiveProps&&t.componentWillReceiveProps(n,r),"function"==typeof t.UNSAFE_componentWillReceiveProps&&t.UNSAFE_componentWillReceiveProps(n,r),t.state!==e&&ao.enqueueReplaceState(t,t.state,null)}function so(e,t,n,r){var o=e.stateNode;o.props=n,o.state=e.memoizedState,o.refs=oo;var i=t.contextType;"object"==typeof i&&null!==i?o.context=Bi(i):(i=Mr(t)?jr:Or.current,o.context=Lr(e,i)),null!==(i=e.updateQueue)&&(na(e,i,n,o,r),o.state=e.memoizedState),"function"==typeof(i=t.getDerivedStateFromProps)&&(io(e,t,i,n),o.state=e.memoizedState),"function"==typeof t.getDerivedStateFromProps||"function"==typeof o.getSnapshotBeforeUpdate||"function"!=typeof o.UNSAFE_componentWillMount&&"function"!=typeof o.componentWillMount||(t=o.state,"function"==typeof o.componentWillMount&&o.componentWillMount(),"function"==typeof o.UNSAFE_componentWillMount&&o.UNSAFE_componentWillMount(),t!==o.state&&ao.enqueueReplaceState(o,o.state,null),null!==(i=e.updateQueue)&&(na(e,i,n,o,r),o.state=e.memoizedState)),"function"==typeof o.componentDidMount&&(e.effectTag|=4)}var fo=Array.isArray;function 
po(e,t,n){if(null!==(e=n.ref)&&"function"!=typeof e&&"object"!=typeof e){if(n._owner){n=n._owner;var r=void 0;n&&(1!==n.tag&&a("309"),r=n.stateNode),r||a("147",e);var o=""+e;return null!==t&&null!==t.ref&&"function"==typeof t.ref&&t.ref._stringRef===o?t.ref:((t=function(e){var t=r.refs;t===oo&&(t=r.refs={}),null===e?delete t[o]:t[o]=e})._stringRef=o,t)}"string"!=typeof e&&a("284"),n._owner||a("290",e)}return e}function ho(e,t){"textarea"!==e.type&&a("31","[object Object]"===Object.prototype.toString.call(t)?"object with keys {"+Object.keys(t).join(", ")+"}":t,"")}function mo(e){function t(t,n){if(e){var r=t.lastEffect;null!==r?(r.nextEffect=n,t.lastEffect=n):t.firstEffect=t.lastEffect=n,n.nextEffect=null,n.effectTag=8}}function n(n,r){if(!e)return null;for(;null!==r;)t(n,r),r=r.sibling;return null}function r(e,t){for(e=new Map;null!==t;)null!==t.key?e.set(t.key,t):e.set(t.index,t),t=t.sibling;return e}function o(e,t,n){return(e=Kr(e,t)).index=0,e.sibling=null,e}function i(t,n,r){return t.index=r,e?null!==(r=t.alternate)?(r=r.index)<n?(t.effectTag=2,n):r:(t.effectTag=2,n):n}function l(t){return e&&null===t.alternate&&(t.effectTag=2),t}function u(e,t,n,r){return null===t||6!==t.tag?((t=Xr(n,e.mode,r)).return=e,t):((t=o(t,n)).return=e,t)}function c(e,t,n,r){return null!==t&&t.elementType===n.type?((r=o(t,n.props)).ref=po(e,t,n),r.return=e,r):((r=Qr(n.type,n.key,n.props,null,e.mode,r)).ref=po(e,t,n),r.return=e,r)}function s(e,t,n,r){return null===t||4!==t.tag||t.stateNode.containerInfo!==n.containerInfo||t.stateNode.implementation!==n.implementation?((t=Jr(n,e.mode,r)).return=e,t):((t=o(t,n.children||[])).return=e,t)}function f(e,t,n,r,i){return null===t||7!==t.tag?((t=Gr(n,e.mode,r,i)).return=e,t):((t=o(t,n)).return=e,t)}function d(e,t,n){if("string"==typeof t||"number"==typeof t)return(t=Xr(""+t,e.mode,n)).return=e,t;if("object"==typeof t&&null!==t){switch(t.$$typeof){case Ke:return(n=Qr(t.type,t.key,t.props,null,e.mode,n)).ref=po(e,null,t),n.return=e,n;case 
Qe:return(t=Jr(t,e.mode,n)).return=e,t}if(fo(t)||at(t))return(t=Gr(t,e.mode,n,null)).return=e,t;ho(e,t)}return null}function p(e,t,n,r){var o=null!==t?t.key:null;if("string"==typeof n||"number"==typeof n)return null!==o?null:u(e,t,""+n,r);if("object"==typeof n&&null!==n){switch(n.$$typeof){case Ke:return n.key===o?n.type===Ge?f(e,t,n.props.children,r,o):c(e,t,n,r):null;case Qe:return n.key===o?s(e,t,n,r):null}if(fo(n)||at(n))return null!==o?null:f(e,t,n,r,null);ho(e,n)}return null}function h(e,t,n,r,o){if("string"==typeof r||"number"==typeof r)return u(t,e=e.get(n)||null,""+r,o);if("object"==typeof r&&null!==r){switch(r.$$typeof){case Ke:return e=e.get(null===r.key?n:r.key)||null,r.type===Ge?f(t,e,r.props.children,o,r.key):c(t,e,r,o);case Qe:return s(t,e=e.get(null===r.key?n:r.key)||null,r,o)}if(fo(r)||at(r))return f(t,e=e.get(n)||null,r,o,null);ho(t,r)}return null}function m(o,a,l,u){for(var c=null,s=null,f=a,m=a=0,v=null;null!==f&&m<l.length;m++){f.index>m?(v=f,f=null):v=f.sibling;var y=p(o,f,l[m],u);if(null===y){null===f&&(f=v);break}e&&f&&null===y.alternate&&t(o,f),a=i(y,a,m),null===s?c=y:s.sibling=y,s=y,f=v}if(m===l.length)return n(o,f),c;if(null===f){for(;m<l.length;m++)(f=d(o,l[m],u))&&(a=i(f,a,m),null===s?c=f:s.sibling=f,s=f);return c}for(f=r(o,f);m<l.length;m++)(v=h(f,o,m,l[m],u))&&(e&&null!==v.alternate&&f.delete(null===v.key?m:v.key),a=i(v,a,m),null===s?c=v:s.sibling=v,s=v);return e&&f.forEach(function(e){return t(o,e)}),c}function v(o,l,u,c){var s=at(u);"function"!=typeof s&&a("150"),null==(u=s.call(u))&&a("151");for(var f=s=null,m=l,v=l=0,y=null,g=u.next();null!==m&&!g.done;v++,g=u.next()){m.index>v?(y=m,m=null):y=m.sibling;var b=p(o,m,g.value,c);if(null===b){m||(m=y);break}e&&m&&null===b.alternate&&t(o,m),l=i(b,l,v),null===f?s=b:f.sibling=b,f=b,m=y}if(g.done)return n(o,m),s;if(null===m){for(;!g.done;v++,g=u.next())null!==(g=d(o,g.value,c))&&(l=i(g,l,v),null===f?s=g:f.sibling=g,f=g);return 
s}for(m=r(o,m);!g.done;v++,g=u.next())null!==(g=h(m,o,v,g.value,c))&&(e&&null!==g.alternate&&m.delete(null===g.key?v:g.key),l=i(g,l,v),null===f?s=g:f.sibling=g,f=g);return e&&m.forEach(function(e){return t(o,e)}),s}return function(e,r,i,u){var c="object"==typeof i&&null!==i&&i.type===Ge&&null===i.key;c&&(i=i.props.children);var s="object"==typeof i&&null!==i;if(s)switch(i.$$typeof){case Ke:e:{for(s=i.key,c=r;null!==c;){if(c.key===s){if(7===c.tag?i.type===Ge:c.elementType===i.type){n(e,c.sibling),(r=o(c,i.type===Ge?i.props.children:i.props)).ref=po(e,c,i),r.return=e,e=r;break e}n(e,c);break}t(e,c),c=c.sibling}i.type===Ge?((r=Gr(i.props.children,e.mode,u,i.key)).return=e,e=r):((u=Qr(i.type,i.key,i.props,null,e.mode,u)).ref=po(e,r,i),u.return=e,e=u)}return l(e);case Qe:e:{for(c=i.key;null!==r;){if(r.key===c){if(4===r.tag&&r.stateNode.containerInfo===i.containerInfo&&r.stateNode.implementation===i.implementation){n(e,r.sibling),(r=o(r,i.children||[])).return=e,e=r;break e}n(e,r);break}t(e,r),r=r.sibling}(r=Jr(i,e.mode,u)).return=e,e=r}return l(e)}if("string"==typeof i||"number"==typeof i)return i=""+i,null!==r&&6===r.tag?(n(e,r.sibling),(r=o(r,i)).return=e,e=r):(n(e,r),(r=Xr(i,e.mode,u)).return=e,e=r),l(e);if(fo(i))return m(e,r,i,u);if(at(i))return v(e,r,i,u);if(s&&ho(e,i),void 0===i&&!c)switch(e.tag){case 1:case 0:a("152",(u=e.type).displayName||u.name||"Component")}return n(e,r)}}var vo=mo(!0),yo=mo(!1),go={},bo={current:go},wo={current:go},_o={current:go};function xo(e){return e===go&&a("174"),e}function ko(e,t){Cr(_o,t),Cr(wo,e),Cr(bo,go);var n=t.nodeType;switch(n){case 9:case 11:t=(t=t.documentElement)?t.namespaceURI:tr(null,"");break;default:t=tr(t=(n=8===n?t.parentNode:t).namespaceURI||null,n=n.tagName)}Pr(bo),Cr(bo,t)}function So(e){Pr(bo),Pr(wo),Pr(_o)}function To(e){xo(_o.current);var t=xo(bo.current),n=tr(t,e.type);t!==n&&(Cr(wo,e),Cr(bo,n))}function Eo(e){wo.current===e&&(Pr(bo),Pr(wo))}var 
Po=0,Co=2,Ro=4,Oo=8,No=16,jo=32,Lo=64,Mo=128,Uo=Ve.ReactCurrentDispatcher,Io=0,Fo=null,Ao=null,Do=null,zo=null,Wo=null,Bo=null,Ho=0,Vo=null,$o=0,qo=!1,Ko=null,Qo=0;function Go(){a("321")}function Yo(e,t){if(null===t)return!1;for(var n=0;n<t.length&&n<e.length;n++)if(!Jt(e[n],t[n]))return!1;return!0}function Xo(e,t,n,r,o,i){if(Io=i,Fo=t,Do=null!==e?e.memoizedState:null,Uo.current=null===Do?si:fi,t=n(r,o),qo){do{qo=!1,Qo+=1,Do=null!==e?e.memoizedState:null,Bo=zo,Vo=Wo=Ao=null,Uo.current=fi,t=n(r,o)}while(qo);Ko=null,Qo=0}return Uo.current=ci,(e=Fo).memoizedState=zo,e.expirationTime=Ho,e.updateQueue=Vo,e.effectTag|=$o,e=null!==Ao&&null!==Ao.next,Io=0,Bo=Wo=zo=Do=Ao=Fo=null,Ho=0,Vo=null,$o=0,e&&a("300"),t}function Jo(){Uo.current=ci,Io=0,Bo=Wo=zo=Do=Ao=Fo=null,Ho=0,Vo=null,$o=0,qo=!1,Ko=null,Qo=0}function Zo(){var e={memoizedState:null,baseState:null,queue:null,baseUpdate:null,next:null};return null===Wo?zo=Wo=e:Wo=Wo.next=e,Wo}function ei(){if(null!==Bo)Bo=(Wo=Bo).next,Do=null!==(Ao=Do)?Ao.next:null;else{null===Do&&a("310");var e={memoizedState:(Ao=Do).memoizedState,baseState:Ao.baseState,queue:Ao.queue,baseUpdate:Ao.baseUpdate,next:null};Wo=null===Wo?zo=e:Wo.next=e,Do=Ao.next}return Wo}function ti(e,t){return"function"==typeof t?t(e):t}function ni(e){var t=ei(),n=t.queue;if(null===n&&a("311"),n.lastRenderedReducer=e,0<Qo){var r=n.dispatch;if(null!==Ko){var o=Ko.get(n);if(void 0!==o){Ko.delete(n);var i=t.memoizedState;do{i=e(i,o.action),o=o.next}while(null!==o);return Jt(i,t.memoizedState)||(xi=!0),t.memoizedState=i,t.baseUpdate===n.last&&(t.baseState=i),n.lastRenderedState=i,[i,r]}}return[t.memoizedState,r]}r=n.last;var l=t.baseUpdate;if(i=t.baseState,null!==l?(null!==r&&(r.next=null),r=l.next):r=null!==r?r.next:null,null!==r){var u=o=null,c=r,s=!1;do{var 
f=c.expirationTime;f<Io?(s||(s=!0,u=l,o=i),f>Ho&&(Ho=f)):i=c.eagerReducer===e?c.eagerState:e(i,c.action),l=c,c=c.next}while(null!==c&&c!==r);s||(u=l,o=i),Jt(i,t.memoizedState)||(xi=!0),t.memoizedState=i,t.baseUpdate=u,t.baseState=o,n.lastRenderedState=i}return[t.memoizedState,n.dispatch]}function ri(e,t,n,r){return e={tag:e,create:t,destroy:n,deps:r,next:null},null===Vo?(Vo={lastEffect:null}).lastEffect=e.next=e:null===(t=Vo.lastEffect)?Vo.lastEffect=e.next=e:(n=t.next,t.next=e,e.next=n,Vo.lastEffect=e),e}function oi(e,t,n,r){var o=Zo();$o|=e,o.memoizedState=ri(t,n,void 0,void 0===r?null:r)}function ii(e,t,n,r){var o=ei();r=void 0===r?null:r;var i=void 0;if(null!==Ao){var a=Ao.memoizedState;if(i=a.destroy,null!==r&&Yo(r,a.deps))return void ri(Po,n,i,r)}$o|=e,o.memoizedState=ri(t,n,i,r)}function ai(e,t){return"function"==typeof t?(e=e(),t(e),function(){t(null)}):null!=t?(e=e(),t.current=e,function(){t.current=null}):void 0}function li(){}function ui(e,t,n){25>Qo||a("301");var r=e.alternate;if(e===Fo||null!==r&&r===Fo)if(qo=!0,e={expirationTime:Io,action:n,eagerReducer:null,eagerState:null,next:null},null===Ko&&(Ko=new Map),void 0===(n=Ko.get(t)))Ko.set(t,e);else{for(t=n;null!==t.next;)t=t.next;t.next=e}else{Ha();var o=xl(),i={expirationTime:o=Ga(o,e),action:n,eagerReducer:null,eagerState:null,next:null},l=t.last;if(null===l)i.next=i;else{var u=l.next;null!==u&&(i.next=u),l.next=i}if(t.last=i,0===e.expirationTime&&(null===r||0===r.expirationTime)&&null!==(r=t.lastRenderedReducer))try{var c=t.lastRenderedState,s=r(c,n);if(i.eagerReducer=r,i.eagerState=s,Jt(s,c))return}catch(f){}Ja(e,o)}}var ci={readContext:Bi,useCallback:Go,useContext:Go,useEffect:Go,useImperativeHandle:Go,useLayoutEffect:Go,useMemo:Go,useReducer:Go,useRef:Go,useState:Go,useDebugValue:Go},si={readContext:Bi,useCallback:function(e,t){return Zo().memoizedState=[e,void 0===t?null:t],e},useContext:Bi,useEffect:function(e,t){return oi(516,Mo|Lo,e,t)},useImperativeHandle:function(e,t,n){return 
n=null!=n?n.concat([e]):null,oi(4,Ro|jo,ai.bind(null,t,e),n)},useLayoutEffect:function(e,t){return oi(4,Ro|jo,e,t)},useMemo:function(e,t){var n=Zo();return t=void 0===t?null:t,e=e(),n.memoizedState=[e,t],e},useReducer:function(e,t,n){var r=Zo();return t=void 0!==n?n(t):t,r.memoizedState=r.baseState=t,e=(e=r.queue={last:null,dispatch:null,lastRenderedReducer:e,lastRenderedState:t}).dispatch=ui.bind(null,Fo,e),[r.memoizedState,e]},useRef:function(e){return e={current:e},Zo().memoizedState=e},useState:function(e){var t=Zo();return"function"==typeof e&&(e=e()),t.memoizedState=t.baseState=e,e=(e=t.queue={last:null,dispatch:null,lastRenderedReducer:ti,lastRenderedState:e}).dispatch=ui.bind(null,Fo,e),[t.memoizedState,e]},useDebugValue:li},fi={readContext:Bi,useCallback:function(e,t){var n=ei();t=void 0===t?null:t;var r=n.memoizedState;return null!==r&&null!==t&&Yo(t,r[1])?r[0]:(n.memoizedState=[e,t],e)},useContext:Bi,useEffect:function(e,t){return ii(516,Mo|Lo,e,t)},useImperativeHandle:function(e,t,n){return n=null!=n?n.concat([e]):null,ii(4,Ro|jo,ai.bind(null,t,e),n)},useLayoutEffect:function(e,t){return ii(4,Ro|jo,e,t)},useMemo:function(e,t){var n=ei();t=void 0===t?null:t;var r=n.memoizedState;return null!==r&&null!==t&&Yo(t,r[1])?r[0]:(e=e(),n.memoizedState=[e,t],e)},useReducer:ni,useRef:function(){return ei().memoizedState},useState:function(e){return ni(ti)},useDebugValue:li},di=null,pi=null,hi=!1;function mi(e,t){var n=$r(5,null,null,0);n.elementType="DELETED",n.type="DELETED",n.stateNode=t,n.return=e,n.effectTag=8,null!==e.lastEffect?(e.lastEffect.nextEffect=n,e.lastEffect=n):e.firstEffect=e.lastEffect=n}function vi(e,t){switch(e.tag){case 5:var n=e.type;return null!==(t=1!==t.nodeType||n.toLowerCase()!==t.nodeName.toLowerCase()?null:t)&&(e.stateNode=t,!0);case 6:return null!==(t=""===e.pendingProps||3!==t.nodeType?null:t)&&(e.stateNode=t,!0);case 13:default:return!1}}function yi(e){if(hi){var t=pi;if(t){var n=t;if(!vi(e,t)){if(!(t=kr(n))||!vi(e,t))return 
e.effectTag|=2,hi=!1,void(di=e);mi(di,n)}di=e,pi=Sr(t)}else e.effectTag|=2,hi=!1,di=e}}function gi(e){for(e=e.return;null!==e&&5!==e.tag&&3!==e.tag&&18!==e.tag;)e=e.return;di=e}function bi(e){if(e!==di)return!1;if(!hi)return gi(e),hi=!0,!1;var t=e.type;if(5!==e.tag||"head"!==t&&"body"!==t&&!gr(t,e.memoizedProps))for(t=pi;t;)mi(e,t),t=kr(t);return gi(e),pi=di?kr(e.stateNode):null,!0}function wi(){pi=di=null,hi=!1}var _i=Ve.ReactCurrentOwner,xi=!1;function ki(e,t,n,r){t.child=null===e?yo(t,null,n,r):vo(t,e.child,n,r)}function Si(e,t,n,r,o){n=n.render;var i=t.ref;return Wi(t,o),r=Xo(e,t,n,r,i,o),null===e||xi?(t.effectTag|=1,ki(e,t,r,o),t.child):(t.updateQueue=e.updateQueue,t.effectTag&=-517,e.expirationTime<=o&&(e.expirationTime=0),Li(e,t,o))}function Ti(e,t,n,r,o,i){if(null===e){var a=n.type;return"function"!=typeof a||qr(a)||void 0!==a.defaultProps||null!==n.compare||void 0!==n.defaultProps?((e=Qr(n.type,null,r,null,t.mode,i)).ref=t.ref,e.return=t,t.child=e):(t.tag=15,t.type=a,Ei(e,t,a,r,o,i))}return a=e.child,o<i&&(o=a.memoizedProps,(n=null!==(n=n.compare)?n:en)(o,r)&&e.ref===t.ref)?Li(e,t,i):(t.effectTag|=1,(e=Kr(a,r)).ref=t.ref,e.return=t,t.child=e)}function Ei(e,t,n,r,o,i){return null!==e&&en(e.memoizedProps,r)&&e.ref===t.ref&&(xi=!1,o<i)?Li(e,t,i):Ci(e,t,n,r,i)}function Pi(e,t){var n=t.ref;(null===e&&null!==n||null!==e&&e.ref!==n)&&(t.effectTag|=128)}function Ci(e,t,n,r,o){var i=Mr(n)?jr:Or.current;return i=Lr(t,i),Wi(t,o),n=Xo(e,t,n,r,i,o),null===e||xi?(t.effectTag|=1,ki(e,t,n,o),t.child):(t.updateQueue=e.updateQueue,t.effectTag&=-517,e.expirationTime<=o&&(e.expirationTime=0),Li(e,t,o))}function Ri(e,t,n,r,o){if(Mr(n)){var i=!0;Dr(t)}else i=!1;if(Wi(t,o),null===t.stateNode)null!==e&&(e.alternate=null,t.alternate=null,t.effectTag|=2),uo(t,n,r),so(t,n,r,o),r=!0;else if(null===e){var a=t.stateNode,l=t.memoizedProps;a.props=l;var u=a.context,c=n.contextType;"object"==typeof c&&null!==c?c=Bi(c):c=Lr(t,c=Mr(n)?jr:Or.current);var 
s=n.getDerivedStateFromProps,f="function"==typeof s||"function"==typeof a.getSnapshotBeforeUpdate;f||"function"!=typeof a.UNSAFE_componentWillReceiveProps&&"function"!=typeof a.componentWillReceiveProps||(l!==r||u!==c)&&co(t,a,r,c),Ki=!1;var d=t.memoizedState;u=a.state=d;var p=t.updateQueue;null!==p&&(na(t,p,r,a,o),u=t.memoizedState),l!==r||d!==u||Nr.current||Ki?("function"==typeof s&&(io(t,n,s,r),u=t.memoizedState),(l=Ki||lo(t,n,l,r,d,u,c))?(f||"function"!=typeof a.UNSAFE_componentWillMount&&"function"!=typeof a.componentWillMount||("function"==typeof a.componentWillMount&&a.componentWillMount(),"function"==typeof a.UNSAFE_componentWillMount&&a.UNSAFE_componentWillMount()),"function"==typeof a.componentDidMount&&(t.effectTag|=4)):("function"==typeof a.componentDidMount&&(t.effectTag|=4),t.memoizedProps=r,t.memoizedState=u),a.props=r,a.state=u,a.context=c,r=l):("function"==typeof a.componentDidMount&&(t.effectTag|=4),r=!1)}else a=t.stateNode,l=t.memoizedProps,a.props=t.type===t.elementType?l:ro(t.type,l),u=a.context,"object"==typeof(c=n.contextType)&&null!==c?c=Bi(c):c=Lr(t,c=Mr(n)?jr:Or.current),(f="function"==typeof(s=n.getDerivedStateFromProps)||"function"==typeof a.getSnapshotBeforeUpdate)||"function"!=typeof a.UNSAFE_componentWillReceiveProps&&"function"!=typeof a.componentWillReceiveProps||(l!==r||u!==c)&&co(t,a,r,c),Ki=!1,u=t.memoizedState,d=a.state=u,null!==(p=t.updateQueue)&&(na(t,p,r,a,o),d=t.memoizedState),l!==r||u!==d||Nr.current||Ki?("function"==typeof s&&(io(t,n,s,r),d=t.memoizedState),(s=Ki||lo(t,n,l,r,u,d,c))?(f||"function"!=typeof a.UNSAFE_componentWillUpdate&&"function"!=typeof a.componentWillUpdate||("function"==typeof a.componentWillUpdate&&a.componentWillUpdate(r,d,c),"function"==typeof a.UNSAFE_componentWillUpdate&&a.UNSAFE_componentWillUpdate(r,d,c)),"function"==typeof a.componentDidUpdate&&(t.effectTag|=4),"function"==typeof a.getSnapshotBeforeUpdate&&(t.effectTag|=256)):("function"!=typeof 
a.componentDidUpdate||l===e.memoizedProps&&u===e.memoizedState||(t.effectTag|=4),"function"!=typeof a.getSnapshotBeforeUpdate||l===e.memoizedProps&&u===e.memoizedState||(t.effectTag|=256),t.memoizedProps=r,t.memoizedState=d),a.props=r,a.state=d,a.context=c,r=s):("function"!=typeof a.componentDidUpdate||l===e.memoizedProps&&u===e.memoizedState||(t.effectTag|=4),"function"!=typeof a.getSnapshotBeforeUpdate||l===e.memoizedProps&&u===e.memoizedState||(t.effectTag|=256),r=!1);return Oi(e,t,n,r,i,o)}function Oi(e,t,n,r,o,i){Pi(e,t);var a=0!=(64&t.effectTag);if(!r&&!a)return o&&zr(t,n,!1),Li(e,t,i);r=t.stateNode,_i.current=t;var l=a&&"function"!=typeof n.getDerivedStateFromError?null:r.render();return t.effectTag|=1,null!==e&&a?(t.child=vo(t,e.child,null,i),t.child=vo(t,null,l,i)):ki(e,t,l,i),t.memoizedState=r.state,o&&zr(t,n,!0),t.child}function Ni(e){var t=e.stateNode;t.pendingContext?Fr(0,t.pendingContext,t.pendingContext!==t.context):t.context&&Fr(0,t.context,!1),ko(e,t.containerInfo)}function ji(e,t,n){var r=t.mode,o=t.pendingProps,i=t.memoizedState;if(0==(64&t.effectTag)){i=null;var a=!1}else i={timedOutAt:null!==i?i.timedOutAt:0},a=!0,t.effectTag&=-65;if(null===e)if(a){var l=o.fallback;e=Gr(null,r,0,null),0==(1&t.mode)&&(e.child=null!==t.memoizedState?t.child.child:t.child),r=Gr(l,r,n,null),e.sibling=r,(n=e).return=r.return=t}else n=r=yo(t,null,o.children,n);else null!==e.memoizedState?(l=(r=e.child).sibling,a?(n=o.fallback,o=Kr(r,r.pendingProps),0==(1&t.mode)&&((a=null!==t.memoizedState?t.child.child:t.child)!==r.child&&(o.child=a)),r=o.sibling=Kr(l,n,l.expirationTime),n=o,o.childExpirationTime=0,n.return=r.return=t):n=r=vo(t,r.child,o.children,n)):(l=e.child,a?(a=o.fallback,(o=Gr(null,r,0,null)).child=l,0==(1&t.mode)&&(o.child=null!==t.memoizedState?t.child.child:t.child),(r=o.sibling=Gr(a,r,n,null)).effectTag|=2,n=o,o.childExpirationTime=0,n.return=r.return=t):r=n=vo(t,l,o.children,n)),t.stateNode=e.stateNode;return t.memoizedState=i,t.child=n,r}function 
Li(e,t,n){if(null!==e&&(t.contextDependencies=e.contextDependencies),t.childExpirationTime<n)return null;if(null!==e&&t.child!==e.child&&a("153"),null!==t.child){for(n=Kr(e=t.child,e.pendingProps,e.expirationTime),t.child=n,n.return=t;null!==e.sibling;)e=e.sibling,(n=n.sibling=Kr(e,e.pendingProps,e.expirationTime)).return=t;n.sibling=null}return t.child}function Mi(e,t,n){var r=t.expirationTime;if(null!==e){if(e.memoizedProps!==t.pendingProps||Nr.current)xi=!0;else if(r<n){switch(xi=!1,t.tag){case 3:Ni(t),wi();break;case 5:To(t);break;case 1:Mr(t.type)&&Dr(t);break;case 4:ko(t,t.stateNode.containerInfo);break;case 10:Di(t,t.memoizedProps.value);break;case 13:if(null!==t.memoizedState)return 0!==(r=t.child.childExpirationTime)&&r>=n?ji(e,t,n):null!==(t=Li(e,t,n))?t.sibling:null}return Li(e,t,n)}}else xi=!1;switch(t.expirationTime=0,t.tag){case 2:r=t.elementType,null!==e&&(e.alternate=null,t.alternate=null,t.effectTag|=2),e=t.pendingProps;var o=Lr(t,Or.current);if(Wi(t,n),o=Xo(null,t,r,e,o,n),t.effectTag|=1,"object"==typeof o&&null!==o&&"function"==typeof o.render&&void 0===o.$$typeof){if(t.tag=1,Jo(),Mr(r)){var i=!0;Dr(t)}else i=!1;t.memoizedState=null!==o.state&&void 0!==o.state?o.state:null;var l=r.getDerivedStateFromProps;"function"==typeof l&&io(t,r,l,e),o.updater=ao,t.stateNode=o,o._reactInternalFiber=t,so(t,r,e,n),t=Oi(null,t,r,!0,i,n)}else t.tag=0,ki(null,t,o,n),t=t.child;return t;case 16:switch(o=t.elementType,null!==e&&(e.alternate=null,t.alternate=null,t.effectTag|=2),i=t.pendingProps,e=function(e){var t=e._result;switch(e._status){case 1:return t;case 2:case 0:throw t;default:switch(e._status=0,(t=(t=e._ctor)()).then(function(t){0===e._status&&(t=t.default,e._status=1,e._result=t)},function(t){0===e._status&&(e._status=2,e._result=t)}),e._status){case 1:return e._result;case 2:throw e._result}throw e._result=t,t}}(o),t.type=e,o=t.tag=function(e){if("function"==typeof e)return qr(e)?1:0;if(null!=e){if((e=e.$$typeof)===tt)return 11;if(e===rt)return 
14}return 2}(e),i=ro(e,i),l=void 0,o){case 0:l=Ci(null,t,e,i,n);break;case 1:l=Ri(null,t,e,i,n);break;case 11:l=Si(null,t,e,i,n);break;case 14:l=Ti(null,t,e,ro(e.type,i),r,n);break;default:a("306",e,"")}return l;case 0:return r=t.type,o=t.pendingProps,Ci(e,t,r,o=t.elementType===r?o:ro(r,o),n);case 1:return r=t.type,o=t.pendingProps,Ri(e,t,r,o=t.elementType===r?o:ro(r,o),n);case 3:return Ni(t),null===(r=t.updateQueue)&&a("282"),o=null!==(o=t.memoizedState)?o.element:null,na(t,r,t.pendingProps,null,n),(r=t.memoizedState.element)===o?(wi(),t=Li(e,t,n)):(o=t.stateNode,(o=(null===e||null===e.child)&&o.hydrate)&&(pi=Sr(t.stateNode.containerInfo),di=t,o=hi=!0),o?(t.effectTag|=2,t.child=yo(t,null,r,n)):(ki(e,t,r,n),wi()),t=t.child),t;case 5:return To(t),null===e&&yi(t),r=t.type,o=t.pendingProps,i=null!==e?e.memoizedProps:null,l=o.children,gr(r,o)?l=null:null!==i&&gr(r,i)&&(t.effectTag|=16),Pi(e,t),1!==n&&1&t.mode&&o.hidden?(t.expirationTime=t.childExpirationTime=1,t=null):(ki(e,t,l,n),t=t.child),t;case 6:return null===e&&yi(t),null;case 13:return ji(e,t,n);case 4:return ko(t,t.stateNode.containerInfo),r=t.pendingProps,null===e?t.child=vo(t,null,r,n):ki(e,t,r,n),t.child;case 11:return r=t.type,o=t.pendingProps,Si(e,t,r,o=t.elementType===r?o:ro(r,o),n);case 7:return ki(e,t,t.pendingProps,n),t.child;case 8:case 12:return ki(e,t,t.pendingProps.children,n),t.child;case 10:e:{if(r=t.type._context,o=t.pendingProps,l=t.memoizedProps,Di(t,i=o.value),null!==l){var u=l.value;if(0===(i=Jt(u,i)?0:0|("function"==typeof r._calculateChangedBits?r._calculateChangedBits(u,i):1073741823))){if(l.children===o.children&&!Nr.current){t=Li(e,t,n);break e}}else for(null!==(u=t.child)&&(u.return=t);null!==u;){var c=u.contextDependencies;if(null!==c){l=u.child;for(var s=c.first;null!==s;){if(s.context===r&&0!=(s.observedBits&i)){1===u.tag&&((s=Yi(n)).tag=$i,Ji(u,s)),u.expirationTime<n&&(u.expirationTime=n),null!==(s=u.alternate)&&s.expirationTime<n&&(s.expirationTime=n),s=n;for(var 
f=u.return;null!==f;){var d=f.alternate;if(f.childExpirationTime<s)f.childExpirationTime=s,null!==d&&d.childExpirationTime<s&&(d.childExpirationTime=s);else{if(!(null!==d&&d.childExpirationTime<s))break;d.childExpirationTime=s}f=f.return}c.expirationTime<n&&(c.expirationTime=n);break}s=s.next}}else l=10===u.tag&&u.type===t.type?null:u.child;if(null!==l)l.return=u;else for(l=u;null!==l;){if(l===t){l=null;break}if(null!==(u=l.sibling)){u.return=l.return,l=u;break}l=l.return}u=l}}ki(e,t,o.children,n),t=t.child}return t;case 9:return o=t.type,r=(i=t.pendingProps).children,Wi(t,n),r=r(o=Bi(o,i.unstable_observedBits)),t.effectTag|=1,ki(e,t,r,n),t.child;case 14:return i=ro(o=t.type,t.pendingProps),Ti(e,t,o,i=ro(o.type,i),r,n);case 15:return Ei(e,t,t.type,t.pendingProps,r,n);case 17:return r=t.type,o=t.pendingProps,o=t.elementType===r?o:ro(r,o),null!==e&&(e.alternate=null,t.alternate=null,t.effectTag|=2),t.tag=1,Mr(r)?(e=!0,Dr(t)):e=!1,Wi(t,n),uo(t,r,o),so(t,r,o,n),Oi(null,t,r,!0,e,n)}a("156")}var Ui={current:null},Ii=null,Fi=null,Ai=null;function Di(e,t){var n=e.type._context;Cr(Ui,n._currentValue),n._currentValue=t}function zi(e){var t=Ui.current;Pr(Ui),e.type._context._currentValue=t}function Wi(e,t){Ii=e,Ai=Fi=null;var n=e.contextDependencies;null!==n&&n.expirationTime>=t&&(xi=!0),e.contextDependencies=null}function Bi(e,t){return Ai!==e&&!1!==t&&0!==t&&("number"==typeof t&&1073741823!==t||(Ai=e,t=1073741823),t={context:e,observedBits:t,next:null},null===Fi?(null===Ii&&a("308"),Fi=t,Ii.contextDependencies={first:t,expirationTime:0}):Fi=Fi.next=t),e._currentValue}var Hi=0,Vi=1,$i=2,qi=3,Ki=!1;function Qi(e){return{baseState:e,firstUpdate:null,lastUpdate:null,firstCapturedUpdate:null,lastCapturedUpdate:null,firstEffect:null,lastEffect:null,firstCapturedEffect:null,lastCapturedEffect:null}}function 
Gi(e){return{baseState:e.baseState,firstUpdate:e.firstUpdate,lastUpdate:e.lastUpdate,firstCapturedUpdate:null,lastCapturedUpdate:null,firstEffect:null,lastEffect:null,firstCapturedEffect:null,lastCapturedEffect:null}}function Yi(e){return{expirationTime:e,tag:Hi,payload:null,callback:null,next:null,nextEffect:null}}function Xi(e,t){null===e.lastUpdate?e.firstUpdate=e.lastUpdate=t:(e.lastUpdate.next=t,e.lastUpdate=t)}function Ji(e,t){var n=e.alternate;if(null===n){var r=e.updateQueue,o=null;null===r&&(r=e.updateQueue=Qi(e.memoizedState))}else r=e.updateQueue,o=n.updateQueue,null===r?null===o?(r=e.updateQueue=Qi(e.memoizedState),o=n.updateQueue=Qi(n.memoizedState)):r=e.updateQueue=Gi(o):null===o&&(o=n.updateQueue=Gi(r));null===o||r===o?Xi(r,t):null===r.lastUpdate||null===o.lastUpdate?(Xi(r,t),Xi(o,t)):(Xi(r,t),o.lastUpdate=t)}function Zi(e,t){var n=e.updateQueue;null===(n=null===n?e.updateQueue=Qi(e.memoizedState):ea(e,n)).lastCapturedUpdate?n.firstCapturedUpdate=n.lastCapturedUpdate=t:(n.lastCapturedUpdate.next=t,n.lastCapturedUpdate=t)}function ea(e,t){var n=e.alternate;return null!==n&&t===n.updateQueue&&(t=e.updateQueue=Gi(t)),t}function ta(e,t,n,r,i,a){switch(n.tag){case Vi:return"function"==typeof(e=n.payload)?e.call(a,r,i):e;case qi:e.effectTag=-2049&e.effectTag|64;case Hi:if(null==(i="function"==typeof(e=n.payload)?e.call(a,r,i):e))break;return o({},r,i);case $i:Ki=!0}return r}function na(e,t,n,r,o){Ki=!1;for(var i=(t=ea(e,t)).baseState,a=null,l=0,u=t.firstUpdate,c=i;null!==u;){var s=u.expirationTime;s<o?(null===a&&(a=u,i=c),l<s&&(l=s)):(c=ta(e,0,u,c,n,r),null!==u.callback&&(e.effectTag|=32,u.nextEffect=null,null===t.lastEffect?t.firstEffect=t.lastEffect=u:(t.lastEffect.nextEffect=u,t.lastEffect=u))),u=u.next}for(s=null,u=t.firstCapturedUpdate;null!==u;){var 
f=u.expirationTime;f<o?(null===s&&(s=u,null===a&&(i=c)),l<f&&(l=f)):(c=ta(e,0,u,c,n,r),null!==u.callback&&(e.effectTag|=32,u.nextEffect=null,null===t.lastCapturedEffect?t.firstCapturedEffect=t.lastCapturedEffect=u:(t.lastCapturedEffect.nextEffect=u,t.lastCapturedEffect=u))),u=u.next}null===a&&(t.lastUpdate=null),null===s?t.lastCapturedUpdate=null:e.effectTag|=32,null===a&&null===s&&(i=c),t.baseState=i,t.firstUpdate=a,t.firstCapturedUpdate=s,e.expirationTime=l,e.memoizedState=c}function ra(e,t,n){null!==t.firstCapturedUpdate&&(null!==t.lastUpdate&&(t.lastUpdate.next=t.firstCapturedUpdate,t.lastUpdate=t.lastCapturedUpdate),t.firstCapturedUpdate=t.lastCapturedUpdate=null),oa(t.firstEffect,n),t.firstEffect=t.lastEffect=null,oa(t.firstCapturedEffect,n),t.firstCapturedEffect=t.lastCapturedEffect=null}function oa(e,t){for(;null!==e;){var n=e.callback;if(null!==n){e.callback=null;var r=t;"function"!=typeof n&&a("191",n),n.call(r)}e=e.nextEffect}}function ia(e,t){return{value:e,source:t,stack:ut(t)}}function aa(e){e.effectTag|=4}var la=void 0,ua=void 0,ca=void 0,sa=void 0;la=function(e,t){for(var n=t.child;null!==n;){if(5===n.tag||6===n.tag)e.appendChild(n.stateNode);else if(4!==n.tag&&null!==n.child){n.child.return=n,n=n.child;continue}if(n===t)break;for(;null===n.sibling;){if(null===n.return||n.return===t)return;n=n.return}n.sibling.return=n.return,n=n.sibling}},ua=function(){},ca=function(e,t,n,r,i){var a=e.memoizedProps;if(a!==r){var l=t.stateNode;switch(xo(bo.current),e=null,n){case"input":a=bt(l,a),r=bt(l,r),e=[];break;case"option":a=Kn(l,a),r=Kn(l,r),e=[];break;case"select":a=o({},a,{value:void 0}),r=o({},r,{value:void 0}),e=[];break;case"textarea":a=Gn(l,a),r=Gn(l,r),e=[];break;default:"function"!=typeof a.onClick&&"function"==typeof r.onClick&&(l.onclick=hr)}fr(n,r),l=n=void 0;var u=null;for(n in a)if(!r.hasOwnProperty(n)&&a.hasOwnProperty(n)&&null!=a[n])if("style"===n){var c=a[n];for(l in 
c)c.hasOwnProperty(l)&&(u||(u={}),u[l]="")}else"dangerouslySetInnerHTML"!==n&&"children"!==n&&"suppressContentEditableWarning"!==n&&"suppressHydrationWarning"!==n&&"autoFocus"!==n&&(b.hasOwnProperty(n)?e||(e=[]):(e=e||[]).push(n,null));for(n in r){var s=r[n];if(c=null!=a?a[n]:void 0,r.hasOwnProperty(n)&&s!==c&&(null!=s||null!=c))if("style"===n)if(c){for(l in c)!c.hasOwnProperty(l)||s&&s.hasOwnProperty(l)||(u||(u={}),u[l]="");for(l in s)s.hasOwnProperty(l)&&c[l]!==s[l]&&(u||(u={}),u[l]=s[l])}else u||(e||(e=[]),e.push(n,u)),u=s;else"dangerouslySetInnerHTML"===n?(s=s?s.__html:void 0,c=c?c.__html:void 0,null!=s&&c!==s&&(e=e||[]).push(n,""+s)):"children"===n?c===s||"string"!=typeof s&&"number"!=typeof s||(e=e||[]).push(n,""+s):"suppressContentEditableWarning"!==n&&"suppressHydrationWarning"!==n&&(b.hasOwnProperty(n)?(null!=s&&pr(i,n),e||c===s||(e=[])):(e=e||[]).push(n,s))}u&&(e=e||[]).push("style",u),i=e,(t.updateQueue=i)&&aa(t)}},sa=function(e,t,n,r){n!==r&&aa(t)};var fa="function"==typeof WeakSet?WeakSet:Set;function da(e,t){var n=t.source,r=t.stack;null===r&&null!==n&&(r=ut(n)),null!==n&<(n.type),t=t.value,null!==e&&1===e.tag&<(e.type);try{console.error(t)}catch(o){setTimeout(function(){throw o})}}function pa(e){var t=e.ref;if(null!==t)if("function"==typeof t)try{t(null)}catch(n){Qa(e,n)}else t.current=null}function ha(e,t,n){if(null!==(n=null!==(n=n.updateQueue)?n.lastEffect:null)){var r=n=n.next;do{if((r.tag&e)!==Po){var o=r.destroy;r.destroy=void 0,void 0!==o&&o()}(r.tag&t)!==Po&&(o=r.create,r.destroy=o()),r=r.next}while(r!==n)}}function ma(e){switch("function"==typeof Br&&Br(e),e.tag){case 0:case 11:case 14:case 15:var t=e.updateQueue;if(null!==t&&null!==(t=t.lastEffect)){var n=t=t.next;do{var r=n.destroy;if(void 0!==r){var o=e;try{r()}catch(i){Qa(o,i)}}n=n.next}while(n!==t)}break;case 1:if(pa(e),"function"==typeof(t=e.stateNode).componentWillUnmount)try{t.props=e.memoizedProps,t.state=e.memoizedState,t.componentWillUnmount()}catch(i){Qa(e,i)}break;case 
5:pa(e);break;case 4:ga(e)}}function va(e){return 5===e.tag||3===e.tag||4===e.tag}function ya(e){e:{for(var t=e.return;null!==t;){if(va(t)){var n=t;break e}t=t.return}a("160"),n=void 0}var r=t=void 0;switch(n.tag){case 5:t=n.stateNode,r=!1;break;case 3:case 4:t=n.stateNode.containerInfo,r=!0;break;default:a("161")}16&n.effectTag&&(ir(t,""),n.effectTag&=-17);e:t:for(n=e;;){for(;null===n.sibling;){if(null===n.return||va(n.return)){n=null;break e}n=n.return}for(n.sibling.return=n.return,n=n.sibling;5!==n.tag&&6!==n.tag&&18!==n.tag;){if(2&n.effectTag)continue t;if(null===n.child||4===n.tag)continue t;n.child.return=n,n=n.child}if(!(2&n.effectTag)){n=n.stateNode;break e}}for(var o=e;;){if(5===o.tag||6===o.tag)if(n)if(r){var i=t,l=o.stateNode,u=n;8===i.nodeType?i.parentNode.insertBefore(l,u):i.insertBefore(l,u)}else t.insertBefore(o.stateNode,n);else r?(l=t,u=o.stateNode,8===l.nodeType?(i=l.parentNode).insertBefore(u,l):(i=l).appendChild(u),null!=(l=l._reactRootContainer)||null!==i.onclick||(i.onclick=hr)):t.appendChild(o.stateNode);else if(4!==o.tag&&null!==o.child){o.child.return=o,o=o.child;continue}if(o===e)break;for(;null===o.sibling;){if(null===o.return||o.return===e)return;o=o.return}o.sibling.return=o.return,o=o.sibling}}function ga(e){for(var t=e,n=!1,r=void 0,o=void 0;;){if(!n){n=t.return;e:for(;;){switch(null===n&&a("160"),n.tag){case 5:r=n.stateNode,o=!1;break e;case 3:case 4:r=n.stateNode.containerInfo,o=!0;break e}n=n.return}n=!0}if(5===t.tag||6===t.tag){e:for(var i=t,l=i;;)if(ma(l),null!==l.child&&4!==l.tag)l.child.return=l,l=l.child;else{if(l===i)break;for(;null===l.sibling;){if(null===l.return||l.return===i)break e;l=l.return}l.sibling.return=l.return,l=l.sibling}o?(i=r,l=t.stateNode,8===i.nodeType?i.parentNode.removeChild(l):i.removeChild(l)):r.removeChild(t.stateNode)}else if(4===t.tag){if(null!==t.child){r=t.stateNode.containerInfo,o=!0,t.child.return=t,t=t.child;continue}}else 
if(ma(t),null!==t.child){t.child.return=t,t=t.child;continue}if(t===e)break;for(;null===t.sibling;){if(null===t.return||t.return===e)return;4===(t=t.return).tag&&(n=!1)}t.sibling.return=t.return,t=t.sibling}}function ba(e,t){switch(t.tag){case 0:case 11:case 14:case 15:ha(Ro,Oo,t);break;case 1:break;case 5:var n=t.stateNode;if(null!=n){var r=t.memoizedProps;e=null!==e?e.memoizedProps:r;var o=t.type,i=t.updateQueue;t.updateQueue=null,null!==i&&function(e,t,n,r,o){e[M]=o,"input"===n&&"radio"===o.type&&null!=o.name&&_t(e,o),dr(n,r),r=dr(n,o);for(var i=0;i<t.length;i+=2){var a=t[i],l=t[i+1];"style"===a?cr(e,l):"dangerouslySetInnerHTML"===a?or(e,l):"children"===a?ir(e,l):yt(e,a,l,r)}switch(n){case"input":xt(e,o);break;case"textarea":Xn(e,o);break;case"select":t=e._wrapperState.wasMultiple,e._wrapperState.wasMultiple=!!o.multiple,null!=(n=o.value)?Qn(e,!!o.multiple,n,!1):t!==!!o.multiple&&(null!=o.defaultValue?Qn(e,!!o.multiple,o.defaultValue,!0):Qn(e,!!o.multiple,o.multiple?[]:"",!1))}}(n,i,o,e,r)}break;case 6:null===t.stateNode&&a("162"),t.stateNode.nodeValue=t.memoizedProps;break;case 3:case 12:break;case 13:if(n=t.memoizedState,r=void 0,e=t,null===n?r=!1:(r=!0,e=t.child,0===n.timedOutAt&&(n.timedOutAt=xl())),null!==e&&function(e,t){for(var n=e;;){if(5===n.tag){var r=n.stateNode;if(t)r.style.display="none";else{r=n.stateNode;var o=n.memoizedProps.style;o=null!=o&&o.hasOwnProperty("display")?o.display:null,r.style.display=ur("display",o)}}else if(6===n.tag)n.stateNode.nodeValue=t?"":n.memoizedProps;else{if(13===n.tag&&null!==n.memoizedState){(r=n.child.sibling).return=n,n=r;continue}if(null!==n.child){n.child.return=n,n=n.child;continue}}if(n===e)break;for(;null===n.sibling;){if(null===n.return||n.return===e)return;n=n.return}n.sibling.return=n.return,n=n.sibling}}(e,r),null!==(n=t.updateQueue)){t.updateQueue=null;var l=t.stateNode;null===l&&(l=t.stateNode=new fa),n.forEach(function(e){var n=function(e,t){var 
n=e.stateNode;null!==n&&n.delete(t),t=Ga(t=xl(),e),null!==(e=Xa(e,t))&&(Zr(e,t),0!==(t=e.expirationTime)&&kl(e,t))}.bind(null,t,e);l.has(e)||(l.add(e),e.then(n,n))})}break;case 17:break;default:a("163")}}var wa="function"==typeof WeakMap?WeakMap:Map;function _a(e,t,n){(n=Yi(n)).tag=qi,n.payload={element:null};var r=t.value;return n.callback=function(){jl(r),da(e,t)},n}function xa(e,t,n){(n=Yi(n)).tag=qi;var r=e.type.getDerivedStateFromError;if("function"==typeof r){var o=t.value;n.payload=function(){return r(o)}}var i=e.stateNode;return null!==i&&"function"==typeof i.componentDidCatch&&(n.callback=function(){"function"!=typeof r&&(null===Aa?Aa=new Set([this]):Aa.add(this));var n=t.value,o=t.stack;da(e,t),this.componentDidCatch(n,{componentStack:null!==o?o:""})}),n}function ka(e){switch(e.tag){case 1:Mr(e.type)&&Ur();var t=e.effectTag;return 2048&t?(e.effectTag=-2049&t|64,e):null;case 3:return So(),Ir(),0!=(64&(t=e.effectTag))&&a("285"),e.effectTag=-2049&t|64,e;case 5:return Eo(e),null;case 13:return 2048&(t=e.effectTag)?(e.effectTag=-2049&t|64,e):null;case 18:return null;case 4:return So(),null;case 10:return zi(e),null;default:return null}}var Sa=Ve.ReactCurrentDispatcher,Ta=Ve.ReactCurrentOwner,Ea=1073741822,Pa=!1,Ca=null,Ra=null,Oa=0,Na=-1,ja=!1,La=null,Ma=!1,Ua=null,Ia=null,Fa=null,Aa=null;function Da(){if(null!==Ca)for(var e=Ca.return;null!==e;){var t=e;switch(t.tag){case 1:var n=t.type.childContextTypes;null!=n&&Ur();break;case 3:So(),Ir();break;case 5:Eo(t);break;case 4:So();break;case 10:zi(t)}e=e.return}Ra=null,Oa=0,Na=-1,ja=!1,Ca=null}function za(){for(;null!==La;){var e=La.effectTag;if(16&e&&ir(La.stateNode,""),128&e){var t=La.alternate;null!==t&&(null!==(t=t.ref)&&("function"==typeof t?t(null):t.current=null))}switch(14&e){case 2:ya(La),La.effectTag&=-3;break;case 6:ya(La),La.effectTag&=-3,ba(La.alternate,La);break;case 4:ba(La.alternate,La);break;case 
8:ga(e=La),e.return=null,e.child=null,e.memoizedState=null,e.updateQueue=null,null!==(e=e.alternate)&&(e.return=null,e.child=null,e.memoizedState=null,e.updateQueue=null)}La=La.nextEffect}}function Wa(){for(;null!==La;){if(256&La.effectTag)e:{var e=La.alternate,t=La;switch(t.tag){case 0:case 11:case 15:ha(Co,Po,t);break e;case 1:if(256&t.effectTag&&null!==e){var n=e.memoizedProps,r=e.memoizedState;t=(e=t.stateNode).getSnapshotBeforeUpdate(t.elementType===t.type?n:ro(t.type,n),r),e.__reactInternalSnapshotBeforeUpdate=t}break e;case 3:case 5:case 6:case 4:case 17:break e;default:a("163")}}La=La.nextEffect}}function Ba(e,t){for(;null!==La;){var n=La.effectTag;if(36&n){var r=La.alternate,o=La,i=t;switch(o.tag){case 0:case 11:case 15:ha(No,jo,o);break;case 1:var l=o.stateNode;if(4&o.effectTag)if(null===r)l.componentDidMount();else{var u=o.elementType===o.type?r.memoizedProps:ro(o.type,r.memoizedProps);l.componentDidUpdate(u,r.memoizedState,l.__reactInternalSnapshotBeforeUpdate)}null!==(r=o.updateQueue)&&ra(0,r,l);break;case 3:if(null!==(r=o.updateQueue)){if(l=null,null!==o.child)switch(o.child.tag){case 5:l=o.child.stateNode;break;case 1:l=o.child.stateNode}ra(0,r,l)}break;case 5:i=o.stateNode,null===r&&4&o.effectTag&&yr(o.type,o.memoizedProps)&&i.focus();break;case 6:case 4:case 12:case 13:case 17:break;default:a("163")}}128&n&&(null!==(o=La.ref)&&(i=La.stateNode,"function"==typeof o?o(i):o.current=i)),512&n&&(Ua=e),La=La.nextEffect}}function Ha(){null!==Ia&&xr(Ia),null!==Fa&&Fa()}function Va(e,t){Ma=Pa=!0,e.current===t&&a("177");var n=e.pendingCommitExpirationTime;0===n&&a("261"),e.pendingCommitExpirationTime=0;var r=t.expirationTime,o=t.childExpirationTime;for(function(e,t){if(e.didError=!1,0===t)e.earliestPendingTime=0,e.latestPendingTime=0,e.earliestSuspendedTime=0,e.latestSuspendedTime=0,e.latestPingedTime=0;else{t<e.latestPingedTime&&(e.latestPingedTime=0);var 
n=e.latestPendingTime;0!==n&&(n>t?e.earliestPendingTime=e.latestPendingTime=0:e.earliestPendingTime>t&&(e.earliestPendingTime=e.latestPendingTime)),0===(n=e.earliestSuspendedTime)?Zr(e,t):t<e.latestSuspendedTime?(e.earliestSuspendedTime=0,e.latestSuspendedTime=0,e.latestPingedTime=0,Zr(e,t)):t>n&&Zr(e,t)}no(0,e)}(e,o>r?o:r),Ta.current=null,r=void 0,1<t.effectTag?null!==t.lastEffect?(t.lastEffect.nextEffect=t,r=t.firstEffect):r=t:r=t.firstEffect,mr=Sn,vr=function(){var e=In();if(Fn(e)){if("selectionStart"in e)var t={start:e.selectionStart,end:e.selectionEnd};else e:{var n=(t=(t=e.ownerDocument)&&t.defaultView||window).getSelection&&t.getSelection();if(n&&0!==n.rangeCount){t=n.anchorNode;var r=n.anchorOffset,o=n.focusNode;n=n.focusOffset;try{t.nodeType,o.nodeType}catch(p){t=null;break e}var i=0,a=-1,l=-1,u=0,c=0,s=e,f=null;t:for(;;){for(var d;s!==t||0!==r&&3!==s.nodeType||(a=i+r),s!==o||0!==n&&3!==s.nodeType||(l=i+n),3===s.nodeType&&(i+=s.nodeValue.length),null!==(d=s.firstChild);)f=s,s=d;for(;;){if(s===e)break t;if(f===t&&++u===r&&(a=i),f===o&&++c===n&&(l=i),null!==(d=s.nextSibling))break;f=(s=f).parentNode}s=d}t=-1===a||-1===l?null:{start:a,end:l}}else t=null}t=t||{start:0,end:0}}else t=null;return{focusedElem:e,selectionRange:t}}(),Sn=!1,La=r;null!==La;){o=!1;var l=void 0;try{Wa()}catch(c){o=!0,l=c}o&&(null===La&&a("178"),Qa(La,l),null!==La&&(La=La.nextEffect))}for(La=r;null!==La;){o=!1,l=void 0;try{za()}catch(c){o=!0,l=c}o&&(null===La&&a("178"),Qa(La,l),null!==La&&(La=La.nextEffect))}for(An(vr),vr=null,Sn=!!mr,mr=null,e.current=t,La=r;null!==La;){o=!1,l=void 0;try{Ba(e,n)}catch(c){o=!0,l=c}o&&(null===La&&a("178"),Qa(La,l),null!==La&&(La=La.nextEffect))}if(null!==r&&null!==Ua){var u=function(e,t){Fa=Ia=Ua=null;var n=ol;ol=!0;do{if(512&t.effectTag){var r=!1,o=void 0;try{var 
i=t;ha(Mo,Po,i),ha(Po,Lo,i)}catch(u){r=!0,o=u}r&&Qa(t,o)}t=t.nextEffect}while(null!==t);ol=n,0!==(n=e.expirationTime)&&kl(e,n),sl||ol||Cl(1073741823,!1)}.bind(null,e,r);Ia=i.unstable_runWithPriority(i.unstable_NormalPriority,function(){return _r(u)}),Fa=u}Pa=Ma=!1,"function"==typeof Wr&&Wr(t.stateNode),n=t.expirationTime,0===(t=(t=t.childExpirationTime)>n?t:n)&&(Aa=null),function(e,t){e.expirationTime=t,e.finishedWork=null}(e,t)}function $a(e){for(;;){var t=e.alternate,n=e.return,r=e.sibling;if(0==(1024&e.effectTag)){Ca=e;e:{var i=t,l=Oa,u=(t=e).pendingProps;switch(t.tag){case 2:case 16:break;case 15:case 0:break;case 1:Mr(t.type)&&Ur();break;case 3:So(),Ir(),(u=t.stateNode).pendingContext&&(u.context=u.pendingContext,u.pendingContext=null),null!==i&&null!==i.child||(bi(t),t.effectTag&=-3),ua(t);break;case 5:Eo(t);var c=xo(_o.current);if(l=t.type,null!==i&&null!=t.stateNode)ca(i,t,l,u,c),i.ref!==t.ref&&(t.effectTag|=128);else if(u){var s=xo(bo.current);if(bi(t)){i=(u=t).stateNode;var f=u.type,d=u.memoizedProps,p=c;switch(i[L]=u,i[M]=d,l=void 0,c=f){case"iframe":case"object":Tn("load",i);break;case"video":case"audio":for(f=0;f<te.length;f++)Tn(te[f],i);break;case"source":Tn("error",i);break;case"img":case"image":case"link":Tn("error",i),Tn("load",i);break;case"form":Tn("reset",i),Tn("submit",i);break;case"details":Tn("toggle",i);break;case"input":wt(i,d),Tn("invalid",i),pr(p,"onChange");break;case"select":i._wrapperState={wasMultiple:!!d.multiple},Tn("invalid",i),pr(p,"onChange");break;case"textarea":Yn(i,d),Tn("invalid",i),pr(p,"onChange")}for(l in fr(c,d),f=null,d)d.hasOwnProperty(l)&&(s=d[l],"children"===l?"string"==typeof s?i.textContent!==s&&(f=["children",s]):"number"==typeof s&&i.textContent!==""+s&&(f=["children",""+s]):b.hasOwnProperty(l)&&null!=s&&pr(p,l));switch(c){case"input":Be(i),kt(i,d,!0);break;case"textarea":Be(i),Jn(i);break;case"select":case"option":break;default:"function"==typeof 
d.onClick&&(i.onclick=hr)}l=f,u.updateQueue=l,(u=null!==l)&&aa(t)}else{d=t,p=l,i=u,f=9===c.nodeType?c:c.ownerDocument,s===Zn.html&&(s=er(p)),s===Zn.html?"script"===p?((i=f.createElement("div")).innerHTML="<script><\/script>",f=i.removeChild(i.firstChild)):"string"==typeof i.is?f=f.createElement(p,{is:i.is}):(f=f.createElement(p),"select"===p&&(p=f,i.multiple?p.multiple=!0:i.size&&(p.size=i.size))):f=f.createElementNS(s,p),(i=f)[L]=d,i[M]=u,la(i,t,!1,!1),p=i;var h=c,m=dr(f=l,d=u);switch(f){case"iframe":case"object":Tn("load",p),c=d;break;case"video":case"audio":for(c=0;c<te.length;c++)Tn(te[c],p);c=d;break;case"source":Tn("error",p),c=d;break;case"img":case"image":case"link":Tn("error",p),Tn("load",p),c=d;break;case"form":Tn("reset",p),Tn("submit",p),c=d;break;case"details":Tn("toggle",p),c=d;break;case"input":wt(p,d),c=bt(p,d),Tn("invalid",p),pr(h,"onChange");break;case"option":c=Kn(p,d);break;case"select":p._wrapperState={wasMultiple:!!d.multiple},c=o({},d,{value:void 0}),Tn("invalid",p),pr(h,"onChange");break;case"textarea":Yn(p,d),c=Gn(p,d),Tn("invalid",p),pr(h,"onChange");break;default:c=d}fr(f,c),s=void 0;var v=f,y=p,g=c;for(s in g)if(g.hasOwnProperty(s)){var w=g[s];"style"===s?cr(y,w):"dangerouslySetInnerHTML"===s?null!=(w=w?w.__html:void 0)&&or(y,w):"children"===s?"string"==typeof w?("textarea"!==v||""!==w)&&ir(y,w):"number"==typeof w&&ir(y,""+w):"suppressContentEditableWarning"!==s&&"suppressHydrationWarning"!==s&&"autoFocus"!==s&&(b.hasOwnProperty(s)?null!=w&&pr(h,s):null!=w&&yt(y,s,w,m))}switch(f){case"input":Be(p),kt(p,d,!1);break;case"textarea":Be(p),Jn(p);break;case"option":null!=d.value&&p.setAttribute("value",""+gt(d.value));break;case"select":(c=p).multiple=!!d.multiple,null!=(p=d.value)?Qn(c,!!d.multiple,p,!1):null!=d.defaultValue&&Qn(c,!!d.multiple,d.defaultValue,!0);break;default:"function"==typeof c.onClick&&(p.onclick=hr)}(u=yr(l,u))&&aa(t),t.stateNode=i}null!==t.ref&&(t.effectTag|=128)}else null===t.stateNode&&a("166");break;case 
6:i&&null!=t.stateNode?sa(i,t,i.memoizedProps,u):("string"!=typeof u&&(null===t.stateNode&&a("166")),i=xo(_o.current),xo(bo.current),bi(t)?(l=(u=t).stateNode,i=u.memoizedProps,l[L]=u,(u=l.nodeValue!==i)&&aa(t)):(l=t,(u=(9===i.nodeType?i:i.ownerDocument).createTextNode(u))[L]=t,l.stateNode=u));break;case 11:break;case 13:if(u=t.memoizedState,0!=(64&t.effectTag)){t.expirationTime=l,Ca=t;break e}u=null!==u,l=null!==i&&null!==i.memoizedState,null!==i&&!u&&l&&(null!==(i=i.child.sibling)&&(null!==(c=t.firstEffect)?(t.firstEffect=i,i.nextEffect=c):(t.firstEffect=t.lastEffect=i,i.nextEffect=null),i.effectTag=8)),(u||l)&&(t.effectTag|=4);break;case 7:case 8:case 12:break;case 4:So(),ua(t);break;case 10:zi(t);break;case 9:case 14:break;case 17:Mr(t.type)&&Ur();break;case 18:break;default:a("156")}Ca=null}if(t=e,1===Oa||1!==t.childExpirationTime){for(u=0,l=t.child;null!==l;)(i=l.expirationTime)>u&&(u=i),(c=l.childExpirationTime)>u&&(u=c),l=l.sibling;t.childExpirationTime=u}if(null!==Ca)return Ca;null!==n&&0==(1024&n.effectTag)&&(null===n.firstEffect&&(n.firstEffect=e.firstEffect),null!==e.lastEffect&&(null!==n.lastEffect&&(n.lastEffect.nextEffect=e.firstEffect),n.lastEffect=e.lastEffect),1<e.effectTag&&(null!==n.lastEffect?n.lastEffect.nextEffect=e:n.firstEffect=e,n.lastEffect=e))}else{if(null!==(e=ka(e)))return e.effectTag&=1023,e;null!==n&&(n.firstEffect=n.lastEffect=null,n.effectTag|=1024)}if(null!==r)return r;if(null===n)break;e=n}return null}function qa(e){var t=Mi(e.alternate,e,Oa);return e.memoizedProps=e.pendingProps,null===t&&(t=$a(e)),Ta.current=null,t}function Ka(e,t){Pa&&a("243"),Ha(),Pa=!0;var n=Sa.current;Sa.current=ci;var r=e.nextExpirationTimeToWorkOn;r===Oa&&e===Ra&&null!==Ca||(Da(),Oa=r,Ca=Kr((Ra=e).current,null),e.pendingCommitExpirationTime=0);for(var o=!1;;){try{if(t)for(;null!==Ca&&!El();)Ca=qa(Ca);else for(;null!==Ca;)Ca=qa(Ca)}catch(y){if(Ai=Fi=Ii=null,Jo(),null===Ca)o=!0,jl(y);else{null===Ca&&a("271");var i=Ca,l=i.return;if(null!==l){e:{var 
u=e,c=l,s=i,f=y;if(l=Oa,s.effectTag|=1024,s.firstEffect=s.lastEffect=null,null!==f&&"object"==typeof f&&"function"==typeof f.then){var d=f;f=c;var p=-1,h=-1;do{if(13===f.tag){var m=f.alternate;if(null!==m&&null!==(m=m.memoizedState)){h=10*(1073741822-m.timedOutAt);break}"number"==typeof(m=f.pendingProps.maxDuration)&&(0>=m?p=0:(-1===p||m<p)&&(p=m))}f=f.return}while(null!==f);f=c;do{if((m=13===f.tag)&&(m=void 0!==f.memoizedProps.fallback&&null===f.memoizedState),m){if(null===(c=f.updateQueue)?((c=new Set).add(d),f.updateQueue=c):c.add(d),0==(1&f.mode)){f.effectTag|=64,s.effectTag&=-1957,1===s.tag&&(null===s.alternate?s.tag=17:((l=Yi(1073741823)).tag=$i,Ji(s,l))),s.expirationTime=1073741823;break e}c=l;var v=(s=u).pingCache;null===v?(v=s.pingCache=new wa,m=new Set,v.set(d,m)):void 0===(m=v.get(d))&&(m=new Set,v.set(d,m)),m.has(c)||(m.add(c),s=Ya.bind(null,s,d,c),d.then(s,s)),-1===p?u=1073741823:(-1===h&&(h=10*(1073741822-to(u,l))-5e3),u=h+p),0<=u&&Na<u&&(Na=u),f.effectTag|=2048,f.expirationTime=l;break e}f=f.return}while(null!==f);f=Error((lt(s.type)||"A React component")+" suspended while rendering, but no fallback UI was specified.\n\nAdd a <Suspense fallback=...> component higher in the tree to provide a loading indicator or placeholder to display."+ut(s))}ja=!0,f=ia(f,s),u=c;do{switch(u.tag){case 3:u.effectTag|=2048,u.expirationTime=l,Zi(u,l=_a(u,f,l));break e;case 1:if(p=f,h=u.type,s=u.stateNode,0==(64&u.effectTag)&&("function"==typeof h.getDerivedStateFromError||null!==s&&"function"==typeof s.componentDidCatch&&(null===Aa||!Aa.has(s)))){u.effectTag|=2048,u.expirationTime=l,Zi(u,l=xa(u,p,l));break e}}u=u.return}while(null!==u)}Ca=$a(i);continue}o=!0,jl(y)}}break}if(Pa=!1,Sa.current=n,Ai=Fi=Ii=null,Jo(),o)Ra=null,e.finishedWork=null;else if(null!==Ca)e.finishedWork=null;else{if(null===(n=e.current.alternate)&&a("281"),Ra=null,ja){if(o=e.latestPendingTime,i=e.latestSuspendedTime,l=e.latestPingedTime,0!==o&&o<r||0!==i&&i<r||0!==l&&l<r)return eo(e,r),void 
_l(e,n,r,e.expirationTime,-1);if(!e.didError&&t)return e.didError=!0,r=e.nextExpirationTimeToWorkOn=r,t=e.expirationTime=1073741823,void _l(e,n,r,t,-1)}t&&-1!==Na?(eo(e,r),(t=10*(1073741822-to(e,r)))<Na&&(Na=t),t=10*(1073741822-xl()),t=Na-t,_l(e,n,r,e.expirationTime,0>t?0:t)):(e.pendingCommitExpirationTime=r,e.finishedWork=n)}}function Qa(e,t){for(var n=e.return;null!==n;){switch(n.tag){case 1:var r=n.stateNode;if("function"==typeof n.type.getDerivedStateFromError||"function"==typeof r.componentDidCatch&&(null===Aa||!Aa.has(r)))return Ji(n,e=xa(n,e=ia(t,e),1073741823)),void Ja(n,1073741823);break;case 3:return Ji(n,e=_a(n,e=ia(t,e),1073741823)),void Ja(n,1073741823)}n=n.return}3===e.tag&&(Ji(e,n=_a(e,n=ia(t,e),1073741823)),Ja(e,1073741823))}function Ga(e,t){var n=i.unstable_getCurrentPriorityLevel(),r=void 0;if(0==(1&t.mode))r=1073741823;else if(Pa&&!Ma)r=Oa;else{switch(n){case i.unstable_ImmediatePriority:r=1073741823;break;case i.unstable_UserBlockingPriority:r=1073741822-10*(1+((1073741822-e+15)/10|0));break;case i.unstable_NormalPriority:r=1073741822-25*(1+((1073741822-e+500)/25|0));break;case i.unstable_LowPriority:case i.unstable_IdlePriority:r=1;break;default:a("313")}null!==Ra&&r===Oa&&--r}return n===i.unstable_UserBlockingPriority&&(0===ll||r<ll)&&(ll=r),r}function Ya(e,t,n){var r=e.pingCache;null!==r&&r.delete(t),null!==Ra&&Oa===n?Ra=null:(t=e.earliestSuspendedTime,r=e.latestSuspendedTime,0!==t&&n<=t&&n>=r&&(e.didError=!1,(0===(t=e.latestPingedTime)||t>n)&&(e.latestPingedTime=n),no(n,e),0!==(n=e.expirationTime)&&kl(e,n)))}function Xa(e,t){e.expirationTime<t&&(e.expirationTime=t);var n=e.alternate;null!==n&&n.expirationTime<t&&(n.expirationTime=t);var r=e.return,o=null;if(null===r&&3===e.tag)o=e.stateNode;else for(;null!==r;){if(n=r.alternate,r.childExpirationTime<t&&(r.childExpirationTime=t),null!==n&&n.childExpirationTime<t&&(n.childExpirationTime=t),null===r.return&&3===r.tag){o=r.stateNode;break}r=r.return}return o}function 
Ja(e,t){null!==(e=Xa(e,t))&&(!Pa&&0!==Oa&&t>Oa&&Da(),Zr(e,t),Pa&&!Ma&&Ra===e||kl(e,e.expirationTime),yl>vl&&(yl=0,a("185")))}function Za(e,t,n,r,o){return i.unstable_runWithPriority(i.unstable_ImmediatePriority,function(){return e(t,n,r,o)})}var el=null,tl=null,nl=0,rl=void 0,ol=!1,il=null,al=0,ll=0,ul=!1,cl=null,sl=!1,fl=!1,dl=null,pl=i.unstable_now(),hl=1073741822-(pl/10|0),ml=hl,vl=50,yl=0,gl=null;function bl(){hl=1073741822-((i.unstable_now()-pl)/10|0)}function wl(e,t){if(0!==nl){if(t<nl)return;null!==rl&&i.unstable_cancelCallback(rl)}nl=t,e=i.unstable_now()-pl,rl=i.unstable_scheduleCallback(Pl,{timeout:10*(1073741822-t)-e})}function _l(e,t,n,r,o){e.expirationTime=r,0!==o||El()?0<o&&(e.timeoutHandle=br(function(e,t,n){e.pendingCommitExpirationTime=n,e.finishedWork=t,bl(),ml=hl,Rl(e,n)}.bind(null,e,t,n),o)):(e.pendingCommitExpirationTime=n,e.finishedWork=t)}function xl(){return ol?ml:(Sl(),0!==al&&1!==al||(bl(),ml=hl),ml)}function kl(e,t){null===e.nextScheduledRoot?(e.expirationTime=t,null===tl?(el=tl=e,e.nextScheduledRoot=e):(tl=tl.nextScheduledRoot=e).nextScheduledRoot=el):t>e.expirationTime&&(e.expirationTime=t),ol||(sl?fl&&(il=e,al=1073741823,Ol(e,1073741823,!1)):1073741823===t?Cl(1073741823,!1):wl(e,t))}function Sl(){var e=0,t=null;if(null!==tl)for(var n=tl,r=el;null!==r;){var o=r.expirationTime;if(0===o){if((null===n||null===tl)&&a("244"),r===r.nextScheduledRoot){el=tl=r.nextScheduledRoot=null;break}if(r===el)el=o=r.nextScheduledRoot,tl.nextScheduledRoot=o,r.nextScheduledRoot=null;else{if(r===tl){(tl=n).nextScheduledRoot=el,r.nextScheduledRoot=null;break}n.nextScheduledRoot=r.nextScheduledRoot,r.nextScheduledRoot=null}r=n.nextScheduledRoot}else{if(o>e&&(e=o,t=r),r===tl)break;if(1073741823===e)break;n=r,r=r.nextScheduledRoot}}il=t,al=e}var Tl=!1;function El(){return!!Tl||!!i.unstable_shouldYield()&&(Tl=!0)}function Pl(){try{if(!El()&&null!==el){bl();var e=el;do{var 
t=e.expirationTime;0!==t&&hl<=t&&(e.nextExpirationTimeToWorkOn=hl),e=e.nextScheduledRoot}while(e!==el)}Cl(0,!0)}finally{Tl=!1}}function Cl(e,t){if(Sl(),t)for(bl(),ml=hl;null!==il&&0!==al&&e<=al&&!(Tl&&hl>al);)Ol(il,al,hl>al),Sl(),bl(),ml=hl;else for(;null!==il&&0!==al&&e<=al;)Ol(il,al,!1),Sl();if(t&&(nl=0,rl=null),0!==al&&wl(il,al),yl=0,gl=null,null!==dl)for(e=dl,dl=null,t=0;t<e.length;t++){var n=e[t];try{n._onComplete()}catch(r){ul||(ul=!0,cl=r)}}if(ul)throw e=cl,cl=null,ul=!1,e}function Rl(e,t){ol&&a("253"),il=e,al=t,Ol(e,t,!1),Cl(1073741823,!1)}function Ol(e,t,n){if(ol&&a("245"),ol=!0,n){var r=e.finishedWork;null!==r?Nl(e,r,t):(e.finishedWork=null,-1!==(r=e.timeoutHandle)&&(e.timeoutHandle=-1,wr(r)),Ka(e,n),null!==(r=e.finishedWork)&&(El()?e.finishedWork=r:Nl(e,r,t)))}else null!==(r=e.finishedWork)?Nl(e,r,t):(e.finishedWork=null,-1!==(r=e.timeoutHandle)&&(e.timeoutHandle=-1,wr(r)),Ka(e,n),null!==(r=e.finishedWork)&&Nl(e,r,t));ol=!1}function Nl(e,t,n){var r=e.firstBatch;if(null!==r&&r._expirationTime>=n&&(null===dl?dl=[r]:dl.push(r),r._defer))return e.finishedWork=t,void(e.expirationTime=0);e.finishedWork=null,e===gl?yl++:(gl=e,yl=0),i.unstable_runWithPriority(i.unstable_ImmediatePriority,function(){Va(e,t)})}function jl(e){null===il&&a("246"),il.expirationTime=0,ul||(ul=!0,cl=e)}function Ll(e,t){var n=sl;sl=!0;try{return e(t)}finally{(sl=n)||ol||Cl(1073741823,!1)}}function Ml(e,t){if(sl&&!fl){fl=!0;try{return e(t)}finally{fl=!1}}return e(t)}function Ul(e,t,n){sl||ol||0===ll||(Cl(ll,!1),ll=0);var r=sl;sl=!0;try{return i.unstable_runWithPriority(i.unstable_UserBlockingPriority,function(){return e(t,n)})}finally{(sl=r)||ol||Cl(1073741823,!1)}}function Il(e,t,n,r,o){var i=t.current;e:if(n){t:{2===tn(n=n._reactInternalFiber)&&1===n.tag||a("170");var l=n;do{switch(l.tag){case 3:l=l.stateNode.context;break t;case 1:if(Mr(l.type)){l=l.stateNode.__reactInternalMemoizedMergedChildContext;break t}}l=l.return}while(null!==l);a("171"),l=void 0}if(1===n.tag){var 
u=n.type;if(Mr(u)){n=Ar(n,u,l);break e}}n=l}else n=Rr;return null===t.context?t.context=n:t.pendingContext=n,t=o,(o=Yi(r)).payload={element:e},null!==(t=void 0===t?null:t)&&(o.callback=t),Ha(),Ji(i,o),Ja(i,r),r}function Fl(e,t,n,r){var o=t.current;return Il(e,t,n,o=Ga(xl(),o),r)}function Al(e){if(!(e=e.current).child)return null;switch(e.child.tag){case 5:default:return e.child.stateNode}}function Dl(e){var t=1073741822-25*(1+((1073741822-xl()+500)/25|0));t>=Ea&&(t=Ea-1),this._expirationTime=Ea=t,this._root=e,this._callbacks=this._next=null,this._hasChildren=this._didComplete=!1,this._children=null,this._defer=!0}function zl(){this._callbacks=null,this._didCommit=!1,this._onCommit=this._onCommit.bind(this)}function Wl(e,t,n){e={current:t=$r(3,null,null,t?3:0),containerInfo:e,pendingChildren:null,pingCache:null,earliestPendingTime:0,latestPendingTime:0,earliestSuspendedTime:0,latestSuspendedTime:0,latestPingedTime:0,didError:!1,pendingCommitExpirationTime:0,finishedWork:null,timeoutHandle:-1,context:null,pendingContext:null,hydrate:n,nextExpirationTimeToWorkOn:0,expirationTime:0,firstBatch:null,nextScheduledRoot:null},this._internalRoot=t.stateNode=e}function Bl(e){return!(!e||1!==e.nodeType&&9!==e.nodeType&&11!==e.nodeType&&(8!==e.nodeType||" react-mount-point-unstable "!==e.nodeValue))}function Hl(e,t,n,r,o){var i=n._reactRootContainer;if(i){if("function"==typeof o){var a=o;o=function(){var e=Al(i._internalRoot);a.call(e)}}null!=e?i.legacy_renderSubtreeIntoContainer(e,t,o):i.render(t,o)}else{if(i=n._reactRootContainer=function(e,t){if(t||(t=!(!(t=e?9===e.nodeType?e.documentElement:e.firstChild:null)||1!==t.nodeType||!t.hasAttribute("data-reactroot"))),!t)for(var n;n=e.lastChild;)e.removeChild(n);return new Wl(e,!1,t)}(n,r),"function"==typeof o){var l=o;o=function(){var e=Al(i._internalRoot);l.call(e)}}Ml(function(){null!=e?i.legacy_renderSubtreeIntoContainer(e,t,o):i.render(t,o)})}return Al(i._internalRoot)}function Vl(e,t){var n=2<arguments.length&&void 
0!==arguments[2]?arguments[2]:null;return Bl(t)||a("200"),function(e,t,n){var r=3<arguments.length&&void 0!==arguments[3]?arguments[3]:null;return{$$typeof:Qe,key:null==r?null:""+r,children:e,containerInfo:t,implementation:n}}(e,t,null,n)}Ee=function(e,t,n){switch(t){case"input":if(xt(e,n),t=n.name,"radio"===n.type&&null!=t){for(n=e;n.parentNode;)n=n.parentNode;for(n=n.querySelectorAll("input[name="+JSON.stringify(""+t)+'][type="radio"]'),t=0;t<n.length;t++){var r=n[t];if(r!==e&&r.form===e.form){var o=A(r);o||a("90"),He(r),xt(r,o)}}}break;case"textarea":Xn(e,n);break;case"select":null!=(t=n.value)&&Qn(e,!!n.multiple,t,!1)}},Dl.prototype.render=function(e){this._defer||a("250"),this._hasChildren=!0,this._children=e;var t=this._root._internalRoot,n=this._expirationTime,r=new zl;return Il(e,t,null,n,r._onCommit),r},Dl.prototype.then=function(e){if(this._didComplete)e();else{var t=this._callbacks;null===t&&(t=this._callbacks=[]),t.push(e)}},Dl.prototype.commit=function(){var e=this._root._internalRoot,t=e.firstBatch;if(this._defer&&null!==t||a("251"),this._hasChildren){var n=this._expirationTime;if(t!==this){this._hasChildren&&(n=this._expirationTime=t._expirationTime,this.render(this._children));for(var r=null,o=t;o!==this;)r=o,o=o._next;null===r&&a("251"),r._next=o._next,this._next=t,e.firstBatch=this}this._defer=!1,Rl(e,n),t=this._next,this._next=null,null!==(t=e.firstBatch=t)&&t._hasChildren&&t.render(t._children)}else this._next=null,this._defer=!1},Dl.prototype._onComplete=function(){if(!this._didComplete){this._didComplete=!0;var e=this._callbacks;if(null!==e)for(var t=0;t<e.length;t++)(0,e[t])()}},zl.prototype.then=function(e){if(this._didCommit)e();else{var t=this._callbacks;null===t&&(t=this._callbacks=[]),t.push(e)}},zl.prototype._onCommit=function(){if(!this._didCommit){this._didCommit=!0;var e=this._callbacks;if(null!==e)for(var t=0;t<e.length;t++){var n=e[t];"function"!=typeof n&&a("191",n),n()}}},Wl.prototype.render=function(e,t){var 
n=this._internalRoot,r=new zl;return null!==(t=void 0===t?null:t)&&r.then(t),Fl(e,n,null,r._onCommit),r},Wl.prototype.unmount=function(e){var t=this._internalRoot,n=new zl;return null!==(e=void 0===e?null:e)&&n.then(e),Fl(null,t,null,n._onCommit),n},Wl.prototype.legacy_renderSubtreeIntoContainer=function(e,t,n){var r=this._internalRoot,o=new zl;return null!==(n=void 0===n?null:n)&&o.then(n),Fl(t,r,e,o._onCommit),o},Wl.prototype.createBatch=function(){var e=new Dl(this),t=e._expirationTime,n=this._internalRoot,r=n.firstBatch;if(null===r)n.firstBatch=e,e._next=null;else{for(n=null;null!==r&&r._expirationTime>=t;)n=r,r=r._next;e._next=r,null!==n&&(n._next=e)}return e},je=Ll,Le=Ul,Me=function(){ol||0===ll||(Cl(ll,!1),ll=0)};var $l={createPortal:Vl,findDOMNode:function(e){if(null==e)return null;if(1===e.nodeType)return e;var t=e._reactInternalFiber;return void 0===t&&("function"==typeof e.render?a("188"):a("268",Object.keys(e))),e=null===(e=rn(t))?null:e.stateNode},hydrate:function(e,t,n){return Bl(t)||a("200"),Hl(null,e,t,!0,n)},render:function(e,t,n){return Bl(t)||a("200"),Hl(null,e,t,!1,n)},unstable_renderSubtreeIntoContainer:function(e,t,n,r){return Bl(n)||a("200"),(null==e||void 0===e._reactInternalFiber)&&a("38"),Hl(e,t,n,!1,r)},unmountComponentAtNode:function(e){return Bl(e)||a("40"),!!e._reactRootContainer&&(Ml(function(){Hl(null,null,e,!1,function(){e._reactRootContainer=null})}),!0)},unstable_createPortal:function(){return Vl.apply(void 0,arguments)},unstable_batchedUpdates:Ll,unstable_interactiveUpdates:Ul,flushSync:function(e,t){ol&&a("187");var n=sl;sl=!0;try{return Za(e,t)}finally{sl=n,Cl(1073741823,!1)}},unstable_createRoot:function(e,t){return Bl(e)||a("299","unstable_createRoot"),new Wl(e,!0,null!=t&&!0===t.hydrate)},unstable_flushControlled:function(e){var 
t=sl;sl=!0;try{Za(e)}finally{(sl=t)||ol||Cl(1073741823,!1)}},__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED:{Events:[I,F,A,R.injectEventPluginsByName,g,V,function(e){E(e,H)},Oe,Ne,Cn,N]}};!function(e){var t=e.findFiberByHostInstance;(function(e){if("undefined"==typeof __REACT_DEVTOOLS_GLOBAL_HOOK__)return!1;var t=__REACT_DEVTOOLS_GLOBAL_HOOK__;if(t.isDisabled||!t.supportsFiber)return!0;try{var n=t.inject(e);Wr=Hr(function(e){return t.onCommitFiberRoot(n,e)}),Br=Hr(function(e){return t.onCommitFiberUnmount(n,e)})}catch(r){}})(o({},e,{overrideProps:null,currentDispatcherRef:Ve.ReactCurrentDispatcher,findHostInstanceByFiber:function(e){return null===(e=rn(e))?null:e.stateNode},findFiberByHostInstance:function(e){return t?t(e):null}}))}({findFiberByHostInstance:U,bundleType:0,version:"16.8.6",rendererPackageName:"react-dom"});var ql={default:$l},Kl=ql&&$l||ql;e.exports=Kl.default||Kl},function(e,t,n){"use strict";e.exports=n(125)},function(e,t,n){"use strict";(function(e){Object.defineProperty(t,"__esModule",{value:!0});var n=null,r=!1,o=3,i=-1,a=-1,l=!1,u=!1;function c(){if(!l){var e=n.expirationTime;u?k():u=!0,x(d,e)}}function s(){var e=n,t=n.next;if(n===t)n=null;else{var r=n.previous;n=r.next=t,t.previous=r}e.next=e.previous=null,r=e.callback,t=e.expirationTime,e=e.priorityLevel;var i=o,l=a;o=e,a=t;try{var u=r()}finally{o=i,a=l}if("function"==typeof u)if(u={callback:u,priorityLevel:e,expirationTime:t,next:null,previous:null},null===n)n=u.next=u.previous=u;else{r=null,e=n;do{if(e.expirationTime>=t){r=e;break}e=e.next}while(e!==n);null===r?r=n:r===n&&(n=u,c()),(t=r.previous).next=r.previous=u,u.next=r,u.previous=t}}function f(){if(-1===i&&null!==n&&1===n.priorityLevel){l=!0;try{do{s()}while(null!==n&&1===n.priorityLevel)}finally{l=!1,null!==n?c():u=!1}}}function d(e){l=!0;var o=r;r=e;try{if(e)for(;null!==n;){var i=t.unstable_now();if(!(n.expirationTime<=i))break;do{s()}while(null!==n&&n.expirationTime<=i)}else 
if(null!==n)do{s()}while(null!==n&&!S())}finally{l=!1,r=o,null!==n?c():u=!1,f()}}var p,h,m=Date,v="function"==typeof setTimeout?setTimeout:void 0,y="function"==typeof clearTimeout?clearTimeout:void 0,g="function"==typeof requestAnimationFrame?requestAnimationFrame:void 0,b="function"==typeof cancelAnimationFrame?cancelAnimationFrame:void 0;function w(e){p=g(function(t){y(h),e(t)}),h=v(function(){b(p),e(t.unstable_now())},100)}if("object"==typeof performance&&"function"==typeof performance.now){var _=performance;t.unstable_now=function(){return _.now()}}else t.unstable_now=function(){return m.now()};var x,k,S,T=null;if("undefined"!=typeof window?T=window:void 0!==e&&(T=e),T&&T._schedMock){var E=T._schedMock;x=E[0],k=E[1],S=E[2],t.unstable_now=E[3]}else if("undefined"==typeof window||"function"!=typeof MessageChannel){var P=null,C=function(e){if(null!==P)try{P(e)}finally{P=null}};x=function(e){null!==P?setTimeout(x,0,e):(P=e,setTimeout(C,0,!1))},k=function(){P=null},S=function(){return!1}}else{"undefined"!=typeof console&&("function"!=typeof g&&console.error("This browser doesn't support requestAnimationFrame. Make sure that you load a polyfill in older browsers. https://fb.me/react-polyfills"),"function"!=typeof b&&console.error("This browser doesn't support cancelAnimationFrame. Make sure that you load a polyfill in older browsers. 
https://fb.me/react-polyfills"));var R=null,O=!1,N=-1,j=!1,L=!1,M=0,U=33,I=33;S=function(){return M<=t.unstable_now()};var F=new MessageChannel,A=F.port2;F.port1.onmessage=function(){O=!1;var e=R,n=N;R=null,N=-1;var r=t.unstable_now(),o=!1;if(0>=M-r){if(!(-1!==n&&n<=r))return j||(j=!0,w(D)),R=e,void(N=n);o=!0}if(null!==e){L=!0;try{e(o)}finally{L=!1}}};var D=function(e){if(null!==R){w(D);var t=e-M+I;t<I&&U<I?(8>t&&(t=8),I=t<U?U:t):U=t,M=e+I,O||(O=!0,A.postMessage(void 0))}else j=!1};x=function(e,t){R=e,N=t,L||0>t?A.postMessage(void 0):j||(j=!0,w(D))},k=function(){R=null,O=!1,N=-1}}t.unstable_ImmediatePriority=1,t.unstable_UserBlockingPriority=2,t.unstable_NormalPriority=3,t.unstable_IdlePriority=5,t.unstable_LowPriority=4,t.unstable_runWithPriority=function(e,n){switch(e){case 1:case 2:case 3:case 4:case 5:break;default:e=3}var r=o,a=i;o=e,i=t.unstable_now();try{return n()}finally{o=r,i=a,f()}},t.unstable_next=function(e){switch(o){case 1:case 2:case 3:var n=3;break;default:n=o}var r=o,a=i;o=n,i=t.unstable_now();try{return e()}finally{o=r,i=a,f()}},t.unstable_scheduleCallback=function(e,r){var a=-1!==i?i:t.unstable_now();if("object"==typeof r&&null!==r&&"number"==typeof r.timeout)r=a+r.timeout;else switch(o){case 1:r=a+-1;break;case 2:r=a+250;break;case 5:r=a+1073741823;break;case 4:r=a+1e4;break;default:r=a+5e3}if(e={callback:e,priorityLevel:o,expirationTime:r,next:null,previous:null},null===n)n=e.next=e.previous=e,c();else{a=null;var l=n;do{if(l.expirationTime>r){a=l;break}l=l.next}while(l!==n);null===a?a=n:a===n&&(n=e,c()),(r=a.previous).next=a.previous=e,e.next=a,e.previous=r}return e},t.unstable_cancelCallback=function(e){var t=e.next;if(null!==t){if(t===e)n=null;else{e===n&&(n=t);var r=e.previous;r.next=t,t.previous=r}e.next=e.previous=null}},t.unstable_wrapCallback=function(e){var n=o;return function(){var r=o,a=i;o=n,i=t.unstable_now();try{return e.apply(this,arguments)}finally{o=r,i=a,f()}}},t.unstable_getCurrentPriorityLevel=function(){return 
o},t.unstable_shouldYield=function(){return!r&&(null!==n&&n.expirationTime<a||S())},t.unstable_continueExecution=function(){null!==n&&c()},t.unstable_pauseExecution=function(){},t.unstable_getFirstCallbackNode=function(){return n}}).call(this,n(74))},function(e,t,n){"use strict";var r=n(127);function o(){}function i(){}i.resetWarningCache=o,e.exports=function(){function e(e,t,n,o,i,a){if(a!==r){var l=new Error("Calling PropTypes validators directly is not supported by the `prop-types` package. Use PropTypes.checkPropTypes() to call them. Read more at http://fb.me/use-check-prop-types");throw l.name="Invariant Violation",l}}function t(){return e}e.isRequired=e;var n={array:e,bool:e,func:e,number:e,object:e,string:e,symbol:e,any:e,arrayOf:t,element:e,elementType:e,instanceOf:t,node:e,objectOf:t,oneOf:t,oneOfType:t,shape:t,exact:t,checkPropTypes:i,resetWarningCache:o};return n.PropTypes=n,n}},function(e,t,n){"use strict";e.exports="SECRET_DO_NOT_PASS_THIS_OR_YOU_WILL_BE_FIRED"},function(e,t,n){"use strict";var r=n(8);t.__esModule=!0,t.default=void 0;var o=r(n(7)),i=r(n(35)),a=r(n(51)),l=r(n(0)),u=r(n(129)),c=r(n(4)),s=n(136),f=r(n(137)),d={shouldUpdateScroll:c.default.func,children:c.default.element.isRequired,location:c.default.object.isRequired},p={scrollBehavior:c.default.object.isRequired},h=function(e){function t(t,n){var r;return r=e.call(this,t,n)||this,(0,a.default)((0,i.default)((0,i.default)(r)),"shouldUpdateScroll",function(e,t){var n=r.props.shouldUpdateScroll;return!n||n.call(r.scrollBehavior,e,t)}),(0,a.default)((0,i.default)((0,i.default)(r)),"registerElement",function(e,t,n){r.scrollBehavior.registerElement(e,t,n,r.getRouterProps())}),(0,a.default)((0,i.default)((0,i.default)(r)),"unregisterElement",function(e){r.scrollBehavior.unregisterElement(e)}),r.scrollBehavior=new u.default({addTransitionHook:s.globalHistory.listen,stateStorage:new f.default,getCurrentLocation:function(){return 
r.props.location},shouldUpdateScroll:r.shouldUpdateScroll}),r}(0,o.default)(t,e);var n=t.prototype;return n.getChildContext=function(){return{scrollBehavior:this}},n.componentDidUpdate=function(e){var t=this.props.location;if(t!==e.location){var n={location:e.location};window.__navigatingToLink?t.action="PUSH":t.action="POP",this.scrollBehavior.updateScroll(n,{history:s.globalHistory,location:t})}},n.componentWillUnmount=function(){this.scrollBehavior.stop()},n.getRouterProps=function(){return{location:this.props.location,history:s.globalHistory}},n.render=function(){return l.default.Children.only(this.props.children)},t}(l.default.Component);h.propTypes=d,h.childContextTypes=p;var m=h;t.default=m},function(e,t,n){"use strict";t.__esModule=!0;var r=s(n(130)),o=s(n(131)),i=s(n(132)),a=s(n(133)),l=s(n(134)),u=s(n(9)),c=n(135);function s(e){return e&&e.__esModule?e:{default:e}}var f=2,d=function(){function e(t){var n=this,r=t.addTransitionHook,u=t.stateStorage,s=t.getCurrentLocation,d=t.shouldUpdateScroll;if(function(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}(this,e),this._restoreScrollRestoration=function(){if(n._oldScrollRestoration)try{window.history.scrollRestoration=n._oldScrollRestoration}catch(e){}},this._onWindowScroll=function(){if(n._saveWindowPositionHandle||(n._saveWindowPositionHandle=(0,l.default)(n._saveWindowPosition)),n._windowScrollTarget){var 
e=n._windowScrollTarget,t=e[0],r=e[1],o=(0,i.default)(window),u=(0,a.default)(window);o===t&&u===r&&(n._windowScrollTarget=null,n._cancelCheckWindowScroll())}},this._saveWindowPosition=function(){n._saveWindowPositionHandle=null,n._savePosition(null,window)},this._checkWindowScrollPosition=function(){n._checkWindowScrollHandle=null,n._windowScrollTarget&&(n.scrollToTarget(window,n._windowScrollTarget),++n._numWindowScrollAttempts,n._numWindowScrollAttempts>=f?n._windowScrollTarget=null:n._checkWindowScrollHandle=(0,l.default)(n._checkWindowScrollPosition))},this._stateStorage=u,this._getCurrentLocation=s,this._shouldUpdateScroll=d,"scrollRestoration"in window.history&&!(0,c.isMobileSafari)()){this._oldScrollRestoration=window.history.scrollRestoration;try{window.history.scrollRestoration="manual",(0,o.default)(window,"beforeunload",this._restoreScrollRestoration)}catch(p){this._oldScrollRestoration=null}}else this._oldScrollRestoration=null;this._saveWindowPositionHandle=null,this._checkWindowScrollHandle=null,this._windowScrollTarget=null,this._numWindowScrollAttempts=0,this._scrollElements={},(0,o.default)(window,"scroll",this._onWindowScroll),this._removeTransitionHook=r(function(){l.default.cancel(n._saveWindowPositionHandle),n._saveWindowPositionHandle=null,Object.keys(n._scrollElements).forEach(function(e){var t=n._scrollElements[e];l.default.cancel(t.savePositionHandle),t.savePositionHandle=null,n._saveElementPosition(e)})})}return e.prototype.registerElement=function(e,t,n,r){var i=this;this._scrollElements[e]&&(0,u.default)(!1);var a=function(){i._saveElementPosition(e)},c={element:t,shouldUpdateScroll:n,savePositionHandle:null,onScroll:function(){c.savePositionHandle||(c.savePositionHandle=(0,l.default)(a))}};this._scrollElements[e]=c,(0,o.default)(t,"scroll",c.onScroll),this._updateElementScroll(e,null,r)},e.prototype.unregisterElement=function(e){this._scrollElements[e]||(0,u.default)(!1);var 
t=this._scrollElements[e],n=t.element,o=t.onScroll,i=t.savePositionHandle;(0,r.default)(n,"scroll",o),l.default.cancel(i),delete this._scrollElements[e]},e.prototype.updateScroll=function(e,t){var n=this;this._updateWindowScroll(e,t),Object.keys(this._scrollElements).forEach(function(r){n._updateElementScroll(r,e,t)})},e.prototype.stop=function(){this._restoreScrollRestoration(),(0,r.default)(window,"scroll",this._onWindowScroll),this._cancelCheckWindowScroll(),this._removeTransitionHook()},e.prototype._cancelCheckWindowScroll=function(){l.default.cancel(this._checkWindowScrollHandle),this._checkWindowScrollHandle=null},e.prototype._saveElementPosition=function(e){var t=this._scrollElements[e];t.savePositionHandle=null,this._savePosition(e,t.element)},e.prototype._savePosition=function(e,t){this._stateStorage.save(this._getCurrentLocation(),e,[(0,i.default)(t),(0,a.default)(t)])},e.prototype._updateWindowScroll=function(e,t){this._cancelCheckWindowScroll(),this._windowScrollTarget=this._getScrollTarget(null,this._shouldUpdateScroll,e,t),this._numWindowScrollAttempts=0,this._checkWindowScrollPosition()},e.prototype._updateElementScroll=function(e,t,n){var r=this._scrollElements[e],o=r.element,i=r.shouldUpdateScroll,a=this._getScrollTarget(e,i,t,n);a&&this.scrollToTarget(o,a)},e.prototype._getDefaultScrollTarget=function(e){var t=e.hash;return t&&"#"!==t?"#"===t.charAt(0)?t.slice(1):t:[0,0]},e.prototype._getScrollTarget=function(e,t,n,r){var o=!t||t.call(this,n,r);if(!o||Array.isArray(o)||"string"==typeof o)return o;var i=this._getCurrentLocation();return this._getSavedScrollTarget(e,i)||this._getDefaultScrollTarget(i)},e.prototype._getSavedScrollTarget=function(e,t){return"PUSH"===t.action?null:this._stateStorage.read(t,e)},e.prototype.scrollToTarget=function(e,t){if("string"==typeof t){var n=document.getElementById(t)||document.getElementsByName(t)[0];if(n)return void n.scrollIntoView();t=[0,0]}var 
r=t,o=r[0],l=r[1];(0,i.default)(e,o),(0,a.default)(e,l)},e}();t.default=d,e.exports=t.default},function(e,t,n){"use strict";var r=n(8);t.__esModule=!0,t.default=void 0;var o=function(){};r(n(52)).default&&(o=document.addEventListener?function(e,t,n,r){return e.removeEventListener(t,n,r||!1)}:document.attachEvent?function(e,t,n){return e.detachEvent("on"+t,n)}:void 0);var i=o;t.default=i,e.exports=t.default},function(e,t,n){"use strict";var r=n(8);t.__esModule=!0,t.default=void 0;var o=function(){};r(n(52)).default&&(o=document.addEventListener?function(e,t,n,r){return e.addEventListener(t,n,r||!1)}:document.attachEvent?function(e,t,n){return e.attachEvent("on"+t,function(t){(t=t||window.event).target=t.target||t.srcElement,t.currentTarget=e,n.call(e,t)})}:void 0);var i=o;t.default=i,e.exports=t.default},function(e,t,n){"use strict";var r=n(8);t.__esModule=!0,t.default=function(e,t){var n=(0,o.default)(e);if(void 0===t)return n?"pageXOffset"in n?n.pageXOffset:n.document.documentElement.scrollLeft:e.scrollLeft;n?n.scrollTo(t,"pageYOffset"in n?n.pageYOffset:n.document.documentElement.scrollTop):e.scrollLeft=t};var o=r(n(69));e.exports=t.default},function(e,t,n){"use strict";var r=n(8);t.__esModule=!0,t.default=function(e,t){var n=(0,o.default)(e);if(void 0===t)return n?"pageYOffset"in n?n.pageYOffset:n.document.documentElement.scrollTop:e.scrollTop;n?n.scrollTo("pageXOffset"in n?n.pageXOffset:n.document.documentElement.scrollLeft,t):e.scrollTop=t};var o=r(n(69));e.exports=t.default},function(e,t,n){"use strict";var r=n(8);t.__esModule=!0,t.default=void 0;var o,i=r(n(52)),a="clearTimeout",l=function(e){var t=(new Date).getTime(),n=Math.max(0,16-(t-c)),r=setTimeout(e,n);return c=t,r},u=function(e,t){return e+(e?t[0].toUpperCase()+t.substr(1):t)+"AnimationFrame"};i.default&&["","webkit","moz","o","ms"].some(function(e){var t=u(e,"request");if(t in window)return a=u(e,"cancel"),l=function(e){return window[t](e)}});var c=(new Date).getTime();(o=function(e){return 
l(e)}).cancel=function(e){window[a]&&"function"==typeof window[a]&&window[a](e)};var s=o;t.default=s,e.exports=t.default},function(e,t,n){"use strict";t.__esModule=!0,t.isMobileSafari=function(){return/iPad|iPhone|iPod/.test(window.navigator.platform)&&/^((?!CriOS).)*Safari/.test(window.navigator.userAgent)}},function(e,t,n){"use strict";t.__esModule=!0;var r=Object.assign||function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var r in n)Object.prototype.hasOwnProperty.call(n,r)&&(e[r]=n[r])}return e},o=function(e){return r({},e.location,{state:e.history.state,key:e.history.state&&e.history.state.key||"initial"})},i=function(e,t){var n=[],i=o(e),a=!1,l=function(){};return{get location(){return i},get transitioning(){return a},_onTransitionComplete:function(){a=!1,l()},listen:function(t){n.push(t);var r=function(){i=o(e),t({location:i,action:"POP"})};return e.addEventListener("popstate",r),function(){e.removeEventListener("popstate",r),n=n.filter(function(e){return e!==t})}},navigate:function(t){var u=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},c=u.state,s=u.replace,f=void 0!==s&&s;c=r({},c,{key:Date.now()+""});try{a||f?e.history.replaceState(c,null,t):e.history.pushState(c,null,t)}catch(p){e.location[f?"replace":"assign"](t)}i=o(e),a=!0;var d=new Promise(function(e){return l=e});return n.forEach(function(e){return e({location:i,action:"PUSH"})}),d}}},a=function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"/",t=0,n=[{pathname:e,search:""}],r=[];return{get location(){return n[t]},addEventListener:function(e,t){},removeEventListener:function(e,t){},history:{get entries(){return n},get index(){return t},get state(){return r[t]},pushState:function(e,o,i){var a=i.split("?"),l=a[0],u=a[1],c=void 0===u?"":u;t++,n.push({pathname:l,search:c}),r.push(e)},replaceState:function(e,o,i){var a=i.split("?"),l=a[0],u=a[1],c=void 0===u?"":u;n[t]={pathname:l,search:c},r[t]=e}}}},l=i(!("undefined"==typeof 
window||!window.document||!window.document.createElement)?window:a()),u=l.navigate;t.globalHistory=l,t.navigate=u,t.createHistory=i,t.createMemorySource=a},function(e,t,n){"use strict";t.__esModule=!0,t.default=void 0;var r=function(){function e(){}var t=e.prototype;return t.read=function(e,t){var n=this.getStateKey(e,t);try{var r=window.sessionStorage.getItem(n);return JSON.parse(r)}catch(o){return window&&window.___GATSBY_REACT_ROUTER_SCROLL&&window.___GATSBY_REACT_ROUTER_SCROLL[n]?window.___GATSBY_REACT_ROUTER_SCROLL[n]:{}}},t.save=function(e,t,n){var r=this.getStateKey(e,t),o=JSON.stringify(n);try{window.sessionStorage.setItem(r,o)}catch(i){window&&window.___GATSBY_REACT_ROUTER_SCROLL?window.___GATSBY_REACT_ROUTER_SCROLL[r]=JSON.parse(o):(window.___GATSBY_REACT_ROUTER_SCROLL={},window.___GATSBY_REACT_ROUTER_SCROLL[r]=JSON.parse(o))}},t.getStateKey=function(e,t){var n="@@scroll|"+(e.key||e.pathname);return null==t?n:n+"|"+t},e}();t.default=r},function(e,t,n){"use strict";var r=n(8);t.__esModule=!0,t.default=void 0;var o=r(n(7)),i=r(n(35)),a=r(n(51)),l=r(n(0)),u=r(n(53)),c=r(n(68)),s=r(n(4)),f={scrollKey:s.default.string.isRequired,shouldUpdateScroll:s.default.func,children:s.default.element.isRequired},d={scrollBehavior:s.default.object},p=function(e){function t(t,n){var r;return r=e.call(this,t,n)||this,(0,a.default)((0,i.default)((0,i.default)(r)),"shouldUpdateScroll",function(e,t){var n=r.props.shouldUpdateScroll;return!n||n.call(r.context.scrollBehavior.scrollBehavior,e,t)}),r.scrollKey=t.scrollKey,r}(0,o.default)(t,e);var n=t.prototype;return n.componentDidMount=function(){this.context.scrollBehavior.registerElement(this.props.scrollKey,u.default.findDOMNode(this),this.shouldUpdateScroll)},n.componentDidUpdate=function(e){(0,c.default)(e.scrollKey===this.props.scrollKey,"<ScrollContainer> does not support changing scrollKey.")},n.componentWillUnmount=function(){this.context.scrollBehavior.unregisterElement(this.scrollKey)},n.render=function(){return 
this.props.children},t}(l.default.Component);p.propTypes=f,p.contextTypes=d;var h=p;t.default=h},function(e,t,n){"use strict";t.__esModule=!0,t.parsePath=function(e){var t=e||"/",n="",r="",o=t.indexOf("#");-1!==o&&(r=t.substr(o),t=t.substr(0,o));var i=t.indexOf("?");-1!==i&&(n=t.substr(i),t=t.substr(0,i));return{pathname:t,search:"?"===n?"":n,hash:"#"===r?"":r}}},function(e,t,n){"use strict";n.r(t);var r=n(1);"https:"!==window.location.protocol&&"localhost"!==window.location.hostname?console.error("Service workers can only be used over HTTPS, or on localhost for development"):"serviceWorker"in navigator&&navigator.serviceWorker.register("/team-report-a-cybercrime/sw.js").then(function(e){e.addEventListener("updatefound",function(){Object(r.apiRunner)("onServiceWorkerUpdateFound",{serviceWorker:e});var t=e.installing;console.log("installingWorker",t),t.addEventListener("statechange",function(){switch(t.state){case"installed":navigator.serviceWorker.controller?(window.___swUpdated=!0,Object(r.apiRunner)("onServiceWorkerUpdateReady",{serviceWorker:e}),window.___failedResources&&(console.log("resources failed, SW updated - reloading"),window.location.reload())):(console.log("Content is now available offline!"),Object(r.apiRunner)("onServiceWorkerInstalled",{serviceWorker:e}));break;case"redundant":console.error("The installing service worker became redundant."),Object(r.apiRunner)("onServiceWorkerRedundant",{serviceWorker:e});break;case"activated":Object(r.apiRunner)("onServiceWorkerActive",{serviceWorker:e})}})})}).catch(function(e){console.error("Error during service worker registration:",e)})},function(e,t,n){"use strict";n.r(t);n(77),n(84),n(34);var r=n(7),o=n.n(r),i=n(1),a=n(0),l=n.n(a),u=n(53),c=n.n(u),s=n(17),f=n(24),d=n(71),p=n(72),h=n.n(p),m=(n(50),n(4)),v=n.n(m),y=n(2),g=n(73),b=n(10),w=n(33),_=g.reduce(function(e,t){return e[t.fromPath]=t,e},{});function x(e){var t=_[e];return null!=t&&(window.___replace(t.toPath),!0)}var 
k=function(e,t){x(e.pathname)||Object(i.apiRunner)("onPreRouteUpdate",{location:e,prevLocation:t})},S=function(e,t){x(e.pathname)||(Object(i.apiRunner)("onRouteUpdate",{location:e,prevLocation:t}),window.__navigatingToLink=!1)},T=function(e,t){void 0===t&&(t={}),t.replace||(window.__navigatingToLink=!0);var n=Object(w.parsePath)(e).pathname,r=_[n];if(r&&(e=r.toPath,n=Object(w.parsePath)(e).pathname),window.___swUpdated)window.location=n;else{var o=setTimeout(function(){b.a.emit("onDelayedLoadPageResources",{pathname:n}),Object(i.apiRunner)("onRouteUpdateDelayed",{location:window.location})},1e3);y.default.getResourcesForPathname(n).then(function(n){Object(s.navigate)(e,t),clearTimeout(o)})}};function E(e,t){var n=this,r=t.location,o=r.pathname,a=r.hash,l=Object(i.apiRunner)("shouldUpdateScroll",{prevRouterProps:e,pathname:o,routerProps:{location:r},getSavedScrollPosition:function(e){return n._stateStorage.read(e)}});if(l.length>0)return l[0];if(e&&e.location.pathname===o)return a?a.slice(1):[0,0];return!0}var P=function(e){function t(t){var n;return n=e.call(this,t)||this,k(t.location,null),n}o()(t,e);var n=t.prototype;return n.componentDidMount=function(){S(this.props.location,null)},n.componentDidUpdate=function(e,t,n){n&&S(this.props.location,e.location)},n.getSnapshotBeforeUpdate=function(e){return this.props.location.pathname!==e.location.pathname&&(k(this.props.location,e.location),!0)},n.render=function(){return this.props.children},t}(l.a.Component);P.propTypes={location:v.a.object.isRequired};var C=n(55),R=n(54),O=n.n(R);function N(e,t){for(var n in e)if(!(n in t))return!0;for(var r in t)if(e[r]!==t[r])return!0;return!1}var j=!0,L=function(e){function t(t){var n;n=e.call(this)||this;var r=t.location;return n.state={location:Object.assign({},r),pageResources:y.default.getResourcesForPathnameSync(r.pathname)},n}o()(t,e);var n=t.prototype;return n.reloadPage=function(e){var 
t=window.location.href;window.history.replaceState({},"",e),window.location.replace(t)},t.getDerivedStateFromProps=function(e,t){var n=e.location;return t.location!==n?{pageResources:y.default.getResourcesForPathnameSync(n.pathname),location:Object.assign({},n)}:null},n.hasResources=function(e){return!(!e||!e.json)},n.retryResources=function(e){var t=this,n=e.location.pathname;if(!y.default.getResourcesForPathnameSync(n)){var r=this.props.location;this.nextLocation=e.location,y.default.getResourcesForPathname(n).then(function(n){t.nextLocation===e.location&&(t.hasResources(n)?t.setState({location:Object.assign({},window.location),pageResources:n}):t.reloadPage(r.href))})}},n.shouldComponentUpdate=function(e,t){return this.hasResources(t.pageResources)?this.state.pageResources!==t.pageResources||(this.state.pageResources.component!==t.pageResources.component||(this.state.pageResources.json!==t.pageResources.json||(!(this.state.location.key===t.location.key||!t.pageResources.page||!t.pageResources.page.matchPath&&!t.pageResources.page.path)||function(e,t,n){return N(e.props,t)||N(e.state,n)}(this,e,t)))):(this.retryResources(e),!1)},n.render=function(){if(!this.hasResources(this.state.pageResources)&&j)throw window.___failedResources=!0,new Error("Missing resources for "+this.state.location.pathname);return j=!1,this.props.children(this.state)},t}(l.a.Component);L.propTypes={location:v.a.object.isRequired,pageResources:v.a.object};var M,U=L;window.asyncRequires=O.a,window.___emitter=b.a,window.___loader=y.default,y.default.addPagesArray([window.page]),y.default.addDataPaths(((M={})[window.page.jsonName]=window.dataPath,M)),y.default.addProdRequires(O.a),Object(y.setApiRunnerForLoader)(i.apiRunner),window.__navigatingToLink=!1,window.___loader=y.default,window.___push=function(e){return T(e,{replace:!1})},window.___replace=function(e){return T(e,{replace:!0})},window.___navigate=function(e,t){return 
T(e,t)},x(window.location.pathname),Object(i.apiRunnerAsync)("onClientEntry").then(function(){Object(i.apiRunner)("registerServiceWorker").length>0&&n(140);var e=function(e){function t(){return e.apply(this,arguments)||this}return o()(t,e),t.prototype.render=function(){var e=this,t=this.props.location;return l.a.createElement(U,{location:t},function(t){var n=t.pageResources,r=t.location;return l.a.createElement(P,{location:r},l.a.createElement(d.ScrollContext,{location:r,shouldUpdateScroll:E},l.a.createElement(C.a,Object.assign({},e.props,{location:r,pageResources:n},n.json))))})},t}(l.a.Component),t=window,r=t.page,u=t.location;!r||"/team-report-a-cybercrime"+r.path===u.pathname||r.matchPath&&Object(f.match)("/team-report-a-cybercrime"+r.matchPath,u.pathname)||"/404.html"===r.path||r.path.match(/^\/404\/?$/)||r.path.match(/^\/offline-plugin-app-shell-fallback\/?$/)||Object(s.navigate)("/team-report-a-cybercrime"+r.path+u.search+u.hash,{replace:!0}),y.default.getResourcesForPathname(u.pathname).then(function(){var t=function(){return Object(a.createElement)(s.Router,{basepath:"/team-report-a-cybercrime"},Object(a.createElement)(e,{path:"/*"}))},n=Object(i.apiRunner)("wrapRootElement",{element:l.a.createElement(t,null)},l.a.createElement(t,null),function(e){return{element:e.result}}).pop(),r=function(){return n},o=Object(i.apiRunner)("replaceHydrateFunction",void 0,c.a.hydrate)[0];h()(function(){o(l.a.createElement(r,null),"undefined"!=typeof window?document.getElementById("___gatsby"):void 0,function(){Object(y.postInitialRenderWork)(),Object(i.apiRunner)("onInitialClientRender")})})})})}],[[141,7]]]); | //# sourceMappingURL=app-d41563861a0c58391d30.js.map |
|
_models_py3.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import datetime
from typing import Dict, List, Optional, Union
from azure.core.exceptions import HttpResponseError
import msrest.serialization
from ._source_control_configuration_client_enums import *
class ComplianceStatus(msrest.serialization.Model):
"""Compliance Status details.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar compliance_state: The compliance state of the configuration. Possible values include:
"Pending", "Compliant", "Noncompliant", "Installed", "Failed".
:vartype compliance_state: str or
~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.ComplianceStateType
:param last_config_applied: Datetime the configuration was last applied.
:type last_config_applied: ~datetime.datetime
:param message: Message from when the configuration was applied.
:type message: str
:param message_level: Level of the message. Possible values include: "Error", "Warning",
"Information".
:type message_level: str or
~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.MessageLevelType
"""
_validation = {
'compliance_state': {'readonly': True},
}
_attribute_map = {
'compliance_state': {'key': 'complianceState', 'type': 'str'},
'last_config_applied': {'key': 'lastConfigApplied', 'type': 'iso-8601'},
'message': {'key': 'message', 'type': 'str'},
'message_level': {'key': 'messageLevel', 'type': 'str'},
}
def __init__(
self,
*,
last_config_applied: Optional[datetime.datetime] = None,
message: Optional[str] = None,
message_level: Optional[Union[str, "MessageLevelType"]] = None,
**kwargs
):
super(ComplianceStatus, self).__init__(**kwargs)
self.compliance_state = None
self.last_config_applied = last_config_applied
self.message = message
self.message_level = message_level
class ConfigurationIdentity(msrest.serialization.Model):
"""Identity for the managed cluster.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar principal_id: The principal id of the system assigned identity which is used by the
configuration.
:vartype principal_id: str
:ivar tenant_id: The tenant id of the system assigned identity which is used by the
configuration.
:vartype tenant_id: str
:param type: The type of identity used for the configuration. Type 'SystemAssigned' will use an
implicitly created identity. Type 'None' will not use Managed Identity for the configuration.
Possible values include: "SystemAssigned", "None".
:type type: str or
~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.ResourceIdentityType
"""
_validation = {
'principal_id': {'readonly': True},
'tenant_id': {'readonly': True},
}
_attribute_map = {
'principal_id': {'key': 'principalId', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
*,
type: Optional[Union[str, "ResourceIdentityType"]] = None,
**kwargs
):
super(ConfigurationIdentity, self).__init__(**kwargs)
self.principal_id = None
self.tenant_id = None
self.type = type
class ErrorDefinition(msrest.serialization.Model):
"""Error definition.
All required parameters must be populated in order to send to Azure.
:param code: Required. Service specific error code which serves as the substatus for the HTTP
error code.
:type code: str
:param message: Required. Description of the error.
:type message: str
"""
_validation = {
'code': {'required': True},
'message': {'required': True},
}
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
}
def __init__(
self,
*,
code: str,
message: str,
**kwargs
):
super(ErrorDefinition, self).__init__(**kwargs)
self.code = code
self.message = message
class ErrorResponse(msrest.serialization.Model):
"""Error response.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar error: Error definition.
:vartype error: ~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.ErrorDefinition
"""
_validation = {
'error': {'readonly': True},
}
_attribute_map = {
'error': {'key': 'error', 'type': 'ErrorDefinition'},
}
def __init__(
self,
**kwargs
):
super(ErrorResponse, self).__init__(**kwargs)
self.error = None
class Resource(msrest.serialization.Model):
"""The Resource model definition.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param system_data: Top level metadata
https://github.com/Azure/azure-resource-manager-rpc/blob/master/v1.0/common-api-contracts.md#system-metadata-for-all-azure-resources.
:type system_data: ~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.SystemData
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
}
def __init__(
self,
*,
system_data: Optional["SystemData"] = None,
**kwargs
):
super(Resource, self).__init__(**kwargs)
self.id = None
self.name = None
self.type = None
self.system_data = system_data
class ProxyResource(Resource):
"""ARM proxy resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param system_data: Top level metadata
https://github.com/Azure/azure-resource-manager-rpc/blob/master/v1.0/common-api-contracts.md#system-metadata-for-all-azure-resources.
:type system_data: ~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.SystemData
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
}
def __init__(
self,
*,
system_data: Optional["SystemData"] = None,
**kwargs
):
super(ProxyResource, self).__init__(system_data=system_data, **kwargs)
class ExtensionInstance(ProxyResource):
"""The Extension Instance object.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param system_data: Top level metadata
https://github.com/Azure/azure-resource-manager-rpc/blob/master/v1.0/common-api-contracts.md#system-metadata-for-all-azure-resources.
:type system_data: ~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.SystemData
:param extension_type: Type of the Extension, of which this resource is an instance of. It
must be one of the Extension Types registered with Microsoft.KubernetesConfiguration by the
Extension publisher.
:type extension_type: str
:param auto_upgrade_minor_version: Flag to note if this instance participates in auto upgrade
of minor version, or not.
:type auto_upgrade_minor_version: bool
:param release_train: ReleaseTrain this extension instance participates in for auto-upgrade
(e.g. Stable, Preview, etc.) - only if autoUpgradeMinorVersion is 'true'.
:type release_train: str
:param version: Version of the extension for this extension instance, if it is 'pinned' to a
specific version. autoUpgradeMinorVersion must be 'false'.
:type version: str
:param scope: Scope at which the extension instance is installed.
:type scope: ~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.Scope
:param configuration_settings: Configuration settings, as name-value pairs for configuring this
instance of the extension.
:type configuration_settings: dict[str, str]
:param configuration_protected_settings: Configuration settings that are sensitive, as
name-value pairs for configuring this instance of the extension.
:type configuration_protected_settings: dict[str, str]
:ivar install_state: Status of installation of this instance of the extension. Possible values
include: "Pending", "Installed", "Failed".
:vartype install_state: str or
~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.InstallStateType
:param statuses: Status from this instance of the extension.
:type statuses:
list[~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.ExtensionStatus]
:ivar creation_time: DateLiteral (per ISO8601) noting the time the resource was created by the
client (user).
:vartype creation_time: str
:ivar last_modified_time: DateLiteral (per ISO8601) noting the time the resource was modified
by the client (user).
:vartype last_modified_time: str
:ivar last_status_time: DateLiteral (per ISO8601) noting the time of last status from the
agent.
:vartype last_status_time: str
:ivar error_info: Error information from the Agent - e.g. errors during installation.
:vartype error_info:
~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.ErrorDefinition
:param identity: The identity of the configuration.
:type identity:
~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.ConfigurationIdentity
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'install_state': {'readonly': True},
'creation_time': {'readonly': True},
'last_modified_time': {'readonly': True},
'last_status_time': {'readonly': True},
'error_info': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'extension_type': {'key': 'properties.extensionType', 'type': 'str'},
'auto_upgrade_minor_version': {'key': 'properties.autoUpgradeMinorVersion', 'type': 'bool'},
'release_train': {'key': 'properties.releaseTrain', 'type': 'str'},
'version': {'key': 'properties.version', 'type': 'str'},
'scope': {'key': 'properties.scope', 'type': 'Scope'},
'configuration_settings': {'key': 'properties.configurationSettings', 'type': '{str}'},
'configuration_protected_settings': {'key': 'properties.configurationProtectedSettings', 'type': '{str}'},
'install_state': {'key': 'properties.installState', 'type': 'str'},
'statuses': {'key': 'properties.statuses', 'type': '[ExtensionStatus]'},
'creation_time': {'key': 'properties.creationTime', 'type': 'str'},
'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'str'},
'last_status_time': {'key': 'properties.lastStatusTime', 'type': 'str'},
'error_info': {'key': 'properties.errorInfo', 'type': 'ErrorDefinition'},
'identity': {'key': 'identity', 'type': 'ConfigurationIdentity'},
}
def __init__(
self,
*,
system_data: Optional["SystemData"] = None,
extension_type: Optional[str] = None,
auto_upgrade_minor_version: Optional[bool] = None,
release_train: Optional[str] = "Stable",
version: Optional[str] = None,
scope: Optional["Scope"] = None,
configuration_settings: Optional[Dict[str, str]] = None,
configuration_protected_settings: Optional[Dict[str, str]] = None,
statuses: Optional[List["ExtensionStatus"]] = None,
identity: Optional["ConfigurationIdentity"] = None,
**kwargs
):
super(ExtensionInstance, self).__init__(system_data=system_data, **kwargs)
self.extension_type = extension_type
self.auto_upgrade_minor_version = auto_upgrade_minor_version
self.release_train = release_train
self.version = version
self.scope = scope
self.configuration_settings = configuration_settings
self.configuration_protected_settings = configuration_protected_settings
self.install_state = None
self.statuses = statuses
self.creation_time = None
self.last_modified_time = None
self.last_status_time = None
self.error_info = None
self.identity = identity
class ExtensionInstancesList(msrest.serialization.Model):
"""Result of the request to list Extension Instances. It contains a list of ExtensionInstance objects and a URL link to get the next set of results.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: List of Extension Instances within a Kubernetes cluster.
:vartype value:
list[~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.ExtensionInstance]
:ivar next_link: URL to get the next set of extension instance objects, if any.
:vartype next_link: str
"""
_validation = {
'value': {'readonly': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ExtensionInstance]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExtensionInstancesList, self).__init__(**kwargs)
self.value = None
self.next_link = None
class ExtensionInstanceUpdate(msrest.serialization.Model):
"""Update Extension Instance request object.
:param auto_upgrade_minor_version: Flag to note if this instance participates in Extension
Lifecycle Management or not.
:type auto_upgrade_minor_version: bool
:param release_train: ReleaseTrain this extension instance participates in for auto-upgrade
(e.g. Stable, Preview, etc.) - only if autoUpgradeMinorVersion is 'true'.
:type release_train: str
:param version: Version number of extension, to 'pin' to a specific version.
autoUpgradeMinorVersion must be 'false'.
:type version: str
"""
_attribute_map = {
'auto_upgrade_minor_version': {'key': 'properties.autoUpgradeMinorVersion', 'type': 'bool'},
'release_train': {'key': 'properties.releaseTrain', 'type': 'str'},
'version': {'key': 'properties.version', 'type': 'str'},
}
def __init__(
self,
*,
auto_upgrade_minor_version: Optional[bool] = None,
release_train: Optional[str] = "Stable",
version: Optional[str] = None,
**kwargs
):
super(ExtensionInstanceUpdate, self).__init__(**kwargs)
self.auto_upgrade_minor_version = auto_upgrade_minor_version
self.release_train = release_train
self.version = version
class ExtensionStatus(msrest.serialization.Model):
"""Status from this instance of the extension.
:param code: Status code provided by the Extension.
:type code: str
:param display_status: Short description of status of this instance of the extension.
:type display_status: str
:param level: Level of the status. Possible values include: "Error", "Warning", "Information".
Default value: "Information".
:type level: str or ~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.LevelType
:param message: Detailed message of the status from the Extension instance.
:type message: str
:param time: DateLiteral (per ISO8601) noting the time of installation status.
:type time: str
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'display_status': {'key': 'displayStatus', 'type': 'str'},
'level': {'key': 'level', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'time': {'key': 'time', 'type': 'str'},
}
def __init__(
self,
*,
code: Optional[str] = None,
display_status: Optional[str] = None,
level: Optional[Union[str, "LevelType"]] = "Information",
message: Optional[str] = None,
time: Optional[str] = None,
**kwargs
):
super(ExtensionStatus, self).__init__(**kwargs)
self.code = code
self.display_status = display_status
self.level = level
self.message = message
self.time = time
class HelmOperatorProperties(msrest.serialization.Model):
"""Properties for Helm operator.
:param chart_version: Version of the operator Helm chart.
:type chart_version: str
:param chart_values: Values override for the operator Helm chart.
:type chart_values: str
"""
_attribute_map = {
'chart_version': {'key': 'chartVersion', 'type': 'str'},
'chart_values': {'key': 'chartValues', 'type': 'str'},
}
def __init__(
self,
*,
chart_version: Optional[str] = None,
chart_values: Optional[str] = None,
**kwargs
):
super(HelmOperatorProperties, self).__init__(**kwargs)
self.chart_version = chart_version
self.chart_values = chart_values
class ResourceProviderOperation(msrest.serialization.Model):
"""Supported operation of this resource provider.
Variables are only populated by the server, and will be ignored when sending a request.
:param name: Operation name, in format of {provider}/{resource}/{operation}.
:type name: str
:param display: Display metadata associated with the operation.
:type display:
~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.ResourceProviderOperationDisplay
:ivar is_data_action: The flag that indicates whether the operation applies to data plane.
:vartype is_data_action: bool
"""
_validation = {
'is_data_action': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display': {'key': 'display', 'type': 'ResourceProviderOperationDisplay'},
'is_data_action': {'key': 'isDataAction', 'type': 'bool'},
}
def __init__(
self,
*,
name: Optional[str] = None,
display: Optional["ResourceProviderOperationDisplay"] = None,
**kwargs
):
super(ResourceProviderOperation, self).__init__(**kwargs)
self.name = name
self.display = display
self.is_data_action = None
class ResourceProviderOperationDisplay(msrest.serialization.Model):
"""Display metadata associated with the operation.
:param provider: Resource provider: Microsoft KubernetesConfiguration.
:type provider: str
:param resource: Resource on which the operation is performed.
:type resource: str
:param operation: Type of operation: get, read, delete, etc.
:type operation: str
:param description: Description of this operation.
:type description: str
"""
_attribute_map = {
'provider': {'key': 'provider', 'type': 'str'},
'resource': {'key': 'resource', 'type': 'str'},
'operation': {'key': 'operation', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
*,
provider: Optional[str] = None,
resource: Optional[str] = None,
operation: Optional[str] = None,
description: Optional[str] = None,
**kwargs
):
super(ResourceProviderOperationDisplay, self).__init__(**kwargs)
self.provider = provider
self.resource = resource
self.operation = operation
self.description = description
class ResourceProviderOperationList(msrest.serialization.Model):
"""Result of the request to list operations.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: List of operations supported by this resource provider.
:type value:
list[~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.ResourceProviderOperation]
:ivar next_link: URL to the next set of results, if any.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ResourceProviderOperation]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["ResourceProviderOperation"]] = None,
**kwargs
):
super(ResourceProviderOperationList, self).__init__(**kwargs)
self.value = value
self.next_link = None
class Result(msrest.serialization.Model):
"""Sample result definition.
:param sample_property: Sample property of type string.
:type sample_property: str
"""
_attribute_map = {
'sample_property': {'key': 'sampleProperty', 'type': 'str'},
}
def __init__(
self,
*,
sample_property: Optional[str] = None,
**kwargs
):
super(Result, self).__init__(**kwargs)
self.sample_property = sample_property
class Scope(msrest.serialization.Model):
"""Scope of the extensionInstance. It can be either Cluster or Namespace; but not both.
:param cluster: Specifies that the scope of the extensionInstance is Cluster.
:type cluster: ~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.ScopeCluster
:param namespace: Specifies that the scope of the extensionInstance is Namespace.
:type namespace: ~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.ScopeNamespace
"""
_attribute_map = {
'cluster': {'key': 'cluster', 'type': 'ScopeCluster'},
'namespace': {'key': 'namespace', 'type': 'ScopeNamespace'},
}
def __init__(
self,
*,
cluster: Optional["ScopeCluster"] = None,
namespace: Optional["ScopeNamespace"] = None,
**kwargs
):
super(Scope, self).__init__(**kwargs)
self.cluster = cluster
self.namespace = namespace
class ScopeCluster(msrest.serialization.Model):
"""Specifies that the scope of the extensionInstance is Cluster.
:param release_namespace: Namespace where the extension Release must be placed, for a Cluster
scoped extensionInstance. If this namespace does not exist, it will be created.
:type release_namespace: str
"""
_attribute_map = {
'release_namespace': {'key': 'releaseNamespace', 'type': 'str'},
}
def __init__(
self,
*,
release_namespace: Optional[str] = None,
**kwargs
):
super(ScopeCluster, self).__init__(**kwargs)
self.release_namespace = release_namespace
class ScopeNamespace(msrest.serialization.Model):
"""Specifies that the scope of the extensionInstance is Namespace.
:param target_namespace: Namespace where the extensionInstance will be created for an Namespace
scoped extensionInstance. If this namespace does not exist, it will be created.
:type target_namespace: str
"""
_attribute_map = {
'target_namespace': {'key': 'targetNamespace', 'type': 'str'},
}
def __init__(
self,
*,
target_namespace: Optional[str] = None,
**kwargs
):
super(ScopeNamespace, self).__init__(**kwargs)
self.target_namespace = target_namespace
class SourceControlConfiguration(ProxyResource):
"""The SourceControl Configuration object returned in Get & Put response.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param system_data: Top level metadata
https://github.com/Azure/azure-resource-manager-rpc/blob/master/v1.0/common-api-contracts.md#system-metadata-for-all-azure-resources.
:type system_data: ~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.SystemData
:param repository_url: Url of the SourceControl Repository.
:type repository_url: str
:param operator_namespace: The namespace to which this operator is installed to. Maximum of 253
lower case alphanumeric characters, hyphen and period only.
:type operator_namespace: str
:param operator_instance_name: Instance name of the operator - identifying the specific
configuration.
:type operator_instance_name: str
:param operator_type: Type of the operator. Possible values include: "Flux".
:type operator_type: str or
~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.OperatorType
:param operator_params: Any Parameters for the Operator instance in string format.
:type operator_params: str
:param configuration_protected_settings: Name-value pairs of protected configuration settings
for the configuration.
:type configuration_protected_settings: dict[str, str]
:param operator_scope: Scope at which the operator will be installed. Possible values include:
"cluster", "namespace". Default value: "cluster".
:type operator_scope: str or
~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.OperatorScopeType
:ivar repository_public_key: Public Key associated with this SourceControl configuration
(either generated within the cluster or provided by the user).
:vartype repository_public_key: str
:param ssh_known_hosts_contents: Base64-encoded known_hosts contents containing public SSH keys
required to access private Git instances.
:type ssh_known_hosts_contents: str
:param enable_helm_operator: Option to enable Helm Operator for this git configuration.
:type enable_helm_operator: bool
:param helm_operator_properties: Properties for Helm operator.
:type helm_operator_properties:
~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.HelmOperatorProperties
:ivar provisioning_state: The provisioning state of the resource provider. Possible values
include: "Accepted", "Deleting", "Running", "Succeeded", "Failed".
:vartype provisioning_state: str or
~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.ProvisioningStateType
:ivar compliance_status: Compliance Status of the Configuration.
:vartype compliance_status:
~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.ComplianceStatus
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'repository_public_key': {'readonly': True},
'provisioning_state': {'readonly': True},
'compliance_status': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'repository_url': {'key': 'properties.repositoryUrl', 'type': 'str'},
'operator_namespace': {'key': 'properties.operatorNamespace', 'type': 'str'},
'operator_instance_name': {'key': 'properties.operatorInstanceName', 'type': 'str'},
'operator_type': {'key': 'properties.operatorType', 'type': 'str'},
'operator_params': {'key': 'properties.operatorParams', 'type': 'str'},
'configuration_protected_settings': {'key': 'properties.configurationProtectedSettings', 'type': '{str}'},
'operator_scope': {'key': 'properties.operatorScope', 'type': 'str'},
'repository_public_key': {'key': 'properties.repositoryPublicKey', 'type': 'str'},
'ssh_known_hosts_contents': {'key': 'properties.sshKnownHostsContents', 'type': 'str'},
'enable_helm_operator': {'key': 'properties.enableHelmOperator', 'type': 'bool'},
'helm_operator_properties': {'key': 'properties.helmOperatorProperties', 'type': 'HelmOperatorProperties'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'compliance_status': {'key': 'properties.complianceStatus', 'type': 'ComplianceStatus'},
}
def __init__(
self,
*,
system_data: Optional["SystemData"] = None,
repository_url: Optional[str] = None,
operator_namespace: Optional[str] = "default",
operator_instance_name: Optional[str] = None,
operator_type: Optional[Union[str, "OperatorType"]] = None,
operator_params: Optional[str] = None,
configuration_protected_settings: Optional[Dict[str, str]] = None,
operator_scope: Optional[Union[str, "OperatorScopeType"]] = "cluster",
ssh_known_hosts_contents: Optional[str] = None,
enable_helm_operator: Optional[bool] = None,
helm_operator_properties: Optional["HelmOperatorProperties"] = None,
**kwargs
):
super(SourceControlConfiguration, self).__init__(system_data=system_data, **kwargs)
self.repository_url = repository_url
self.operator_namespace = operator_namespace
self.operator_instance_name = operator_instance_name
self.operator_type = operator_type
self.operator_params = operator_params
self.configuration_protected_settings = configuration_protected_settings
self.operator_scope = operator_scope
self.repository_public_key = None
self.ssh_known_hosts_contents = ssh_known_hosts_contents
self.enable_helm_operator = enable_helm_operator
self.helm_operator_properties = helm_operator_properties
self.provisioning_state = None
self.compliance_status = None
class | (msrest.serialization.Model):
"""Result of the request to list Source Control Configurations. It contains a list of SourceControlConfiguration objects and a URL link to get the next set of results.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: List of Source Control Configurations within a Kubernetes cluster.
:vartype value:
list[~azure.mgmt.kubernetesconfiguration.v2020_07_01_preview.models.SourceControlConfiguration]
:ivar next_link: URL to get the next set of configuration objects, if any.
:vartype next_link: str
"""
_validation = {
'value': {'readonly': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[SourceControlConfiguration]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(SourceControlConfigurationList, self).__init__(**kwargs)
self.value = None
self.next_link = None
class SystemData(msrest.serialization.Model):
"""Top level metadata https://github.com/Azure/azure-resource-manager-rpc/blob/master/v1.0/common-api-contracts.md#system-metadata-for-all-azure-resources.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar created_by: A string identifier for the identity that created the resource.
:vartype created_by: str
:ivar created_by_type: The type of identity that created the resource: user, application,
managedIdentity, key.
:vartype created_by_type: str
:ivar created_at: The timestamp of resource creation (UTC).
:vartype created_at: ~datetime.datetime
:ivar last_modified_by: A string identifier for the identity that last modified the resource.
:vartype last_modified_by: str
:ivar last_modified_by_type: The type of identity that last modified the resource: user,
application, managedIdentity, key.
:vartype last_modified_by_type: str
:ivar last_modified_at: The timestamp of resource last modification (UTC).
:vartype last_modified_at: ~datetime.datetime
"""
_validation = {
'created_by': {'readonly': True},
'created_by_type': {'readonly': True},
'created_at': {'readonly': True},
'last_modified_by': {'readonly': True},
'last_modified_by_type': {'readonly': True},
'last_modified_at': {'readonly': True},
}
_attribute_map = {
'created_by': {'key': 'createdBy', 'type': 'str'},
'created_by_type': {'key': 'createdByType', 'type': 'str'},
'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
super(SystemData, self).__init__(**kwargs)
self.created_by = None
self.created_by_type = None
self.created_at = None
self.last_modified_by = None
self.last_modified_by_type = None
self.last_modified_at = None
| SourceControlConfigurationList |
bot.js | const Condition = require('../Condition');
module.exports = class BotCondition extends Condition {
constructor(client) {
super(client, {
id: 'bot', | });
}
/**
*
* @param {string} type
* @param {import('../../commands/DiscordCommand')} command
* @param {import('discord.js').Message} message
*/
async run(type, command, message) {
return !message.author.bot;
}
}; | name: 'bot',
priority: 10,
type: 'message', |
catch.rs | use crate::Publisher;
impl<'a, T: 'a + Send> Publisher<'a, T> {
pub fn | (self) -> Publisher<'a, T> {
unimplemented!()
}
}
| catch |
278. First Bad Version.py | # The isBadVersion API is already defined for you.
# @param version, an integer
# @return a bool
# def isBadVersion(version):
class Solution(object):
def firstBadVersion(self, n):
"""
:type n: int
:rtype: int | """
low, high = 1, n
while True:
if isBadVersion(low):
return low
mid = low + (high-low)/2
isBad = isBadVersion(mid)
if isBad:
high = mid
else:
low = mid + 1 | |
thresholder.py | #!/home/tarasen/Studying/3kurs/kursova/Django-Agregator-Site/env/bin/python3
#
# The Python Imaging Library
# $Id$
#
# this demo script illustrates how a 1-bit BitmapImage can be used
# as a dynamically updated overlay
#
import sys
if sys.version_info[0] > 2:
import tkinter
else:
import Tkinter as tkinter
from PIL import Image, ImageTk
#
# an image viewer
class UI(tkinter.Frame):
def __init__(self, master, im, value=128):
tkinter.Frame.__init__(self, master)
self.image = im
self.value = value
self.canvas = tkinter.Canvas(self, width=im.size[0], height=im.size[1])
self.backdrop = ImageTk.PhotoImage(im)
self.canvas.create_image(0, 0, image=self.backdrop, anchor=tkinter.NW)
self.canvas.pack()
scale = tkinter.Scale(self, orient=tkinter.HORIZONTAL, from_=0, to=255,
resolution=1, command=self.update_scale,
length=256)
scale.set(value)
scale.bind("<ButtonRelease-1>", self.redraw)
scale.pack()
# uncomment the following line for instant feedback (might
# be too slow on some platforms)
# self.redraw()
def | (self, value):
self.value = float(value)
self.redraw()
def redraw(self, event=None):
# create overlay (note the explicit conversion to mode "1")
im = self.image.point(lambda v, t=self.value: v >= t, "1")
self.overlay = ImageTk.BitmapImage(im, foreground="green")
# update canvas
self.canvas.delete("overlay")
self.canvas.create_image(0, 0, image=self.overlay, anchor=tkinter.NW,
tags="overlay")
# --------------------------------------------------------------------
# main
if len(sys.argv) != 2:
print("Usage: thresholder file")
sys.exit(1)
root = tkinter.Tk()
im = Image.open(sys.argv[1])
if im.mode != "L":
im = im.convert("L")
# im.thumbnail((320,200))
UI(root, im).pack()
root.mainloop()
| update_scale |
network_test.go | // Copyright (c) 2013-2014 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package addrmgr_test
import (
"net"
"testing"
"github.com/btcsuite/btcd/addrmgr"
"github.com/btcsuite/btcd/wire"
)
// TestIPTypes ensures the various functions which determine the type of an IP
// address based on RFCs work as intended.
func TestIPTypes(t *testing.T) {
type ipTest struct {
in wire.NetAddress
rfc1918 bool
rfc2544 bool
rfc3849 bool
rfc3927 bool
rfc3964 bool
rfc4193 bool
rfc4380 bool
rfc4843 bool
rfc4862 bool
rfc5737 bool
rfc6052 bool
rfc6145 bool
rfc6598 bool
local bool
valid bool
routable bool
}
newIPTest := func(ip string, rfc1918, rfc2544, rfc3849, rfc3927, rfc3964,
rfc4193, rfc4380, rfc4843, rfc4862, rfc5737, rfc6052, rfc6145, rfc6598,
local, valid, routable bool) ipTest {
nip := net.ParseIP(ip)
na := *wire.NewNetAddressIPPort(nip, 21102, wire.SFNodeNetwork)
test := ipTest{na, rfc1918, rfc2544, rfc3849, rfc3927, rfc3964, rfc4193, rfc4380,
rfc4843, rfc4862, rfc5737, rfc6052, rfc6145, rfc6598, local, valid, routable}
return test
}
tests := []ipTest{
newIPTest("10.255.255.255", true, false, false, false, false, false,
false, false, false, false, false, false, false, false, true, false),
newIPTest("192.168.0.1", true, false, false, false, false, false,
false, false, false, false, false, false, false, false, true, false),
newIPTest("172.31.255.1", true, false, false, false, false, false,
false, false, false, false, false, false, false, false, true, false),
newIPTest("172.32.1.1", false, false, false, false, false, false, false, false,
false, false, false, false, false, false, true, true),
newIPTest("169.254.250.120", false, false, false, true, false, false,
false, false, false, false, false, false, false, false, true, false),
newIPTest("0.0.0.0", false, false, false, false, false, false, false,
false, false, false, false, false, false, true, false, false),
newIPTest("255.255.255.255", false, false, false, false, false, false,
false, false, false, false, false, false, false, false, false, false),
newIPTest("127.0.0.1", false, false, false, false, false, false,
false, false, false, false, false, false, false, true, true, false),
newIPTest("fd00:dead::1", false, false, false, false, false, true,
false, false, false, false, false, false, false, false, true, false),
newIPTest("2001::1", false, false, false, false, false, false,
true, false, false, false, false, false, false, false, true, true),
newIPTest("2001:10:abcd::1:1", false, false, false, false, false, false,
false, true, false, false, false, false, false, false, true, false),
newIPTest("fe80::1", false, false, false, false, false, false,
false, false, true, false, false, false, false, false, true, false),
newIPTest("fe80:1::1", false, false, false, false, false, false,
false, false, false, false, false, false, false, false, true, true),
newIPTest("64:ff9b::1", false, false, false, false, false, false,
false, false, false, false, true, false, false, false, true, true),
newIPTest("::ffff:abcd:ef12:1", false, false, false, false, false, false,
false, false, false, false, false, false, false, false, true, true),
newIPTest("::1", false, false, false, false, false, false, false, false,
false, false, false, false, false, true, true, false),
newIPTest("198.18.0.1", false, true, false, false, false, false, false,
false, false, false, false, false, false, false, true, false),
newIPTest("100.127.255.1", false, false, false, false, false, false, false,
false, false, false, false, false, true, false, true, false),
newIPTest("203.0.113.1", false, false, false, false, false, false, false,
false, false, false, false, false, false, false, true, false),
}
t.Logf("Running %d tests", len(tests))
for _, test := range tests {
if rv := addrmgr.IsRFC1918(&test.in); rv != test.rfc1918 {
t.Errorf("IsRFC1918 %s\n got: %v want: %v", test.in.IP, rv, test.rfc1918)
}
if rv := addrmgr.IsRFC3849(&test.in); rv != test.rfc3849 {
t.Errorf("IsRFC3849 %s\n got: %v want: %v", test.in.IP, rv, test.rfc3849)
}
if rv := addrmgr.IsRFC3927(&test.in); rv != test.rfc3927 {
t.Errorf("IsRFC3927 %s\n got: %v want: %v", test.in.IP, rv, test.rfc3927)
}
if rv := addrmgr.IsRFC3964(&test.in); rv != test.rfc3964 {
t.Errorf("IsRFC3964 %s\n got: %v want: %v", test.in.IP, rv, test.rfc3964)
}
if rv := addrmgr.IsRFC4193(&test.in); rv != test.rfc4193 {
t.Errorf("IsRFC4193 %s\n got: %v want: %v", test.in.IP, rv, test.rfc4193)
}
| if rv := addrmgr.IsRFC4380(&test.in); rv != test.rfc4380 {
t.Errorf("IsRFC4380 %s\n got: %v want: %v", test.in.IP, rv, test.rfc4380)
}
if rv := addrmgr.IsRFC4843(&test.in); rv != test.rfc4843 {
t.Errorf("IsRFC4843 %s\n got: %v want: %v", test.in.IP, rv, test.rfc4843)
}
if rv := addrmgr.IsRFC4862(&test.in); rv != test.rfc4862 {
t.Errorf("IsRFC4862 %s\n got: %v want: %v", test.in.IP, rv, test.rfc4862)
}
if rv := addrmgr.IsRFC6052(&test.in); rv != test.rfc6052 {
t.Errorf("isRFC6052 %s\n got: %v want: %v", test.in.IP, rv, test.rfc6052)
}
if rv := addrmgr.IsRFC6145(&test.in); rv != test.rfc6145 {
t.Errorf("IsRFC1918 %s\n got: %v want: %v", test.in.IP, rv, test.rfc6145)
}
if rv := addrmgr.IsLocal(&test.in); rv != test.local {
t.Errorf("IsLocal %s\n got: %v want: %v", test.in.IP, rv, test.local)
}
if rv := addrmgr.IsValid(&test.in); rv != test.valid {
t.Errorf("IsValid %s\n got: %v want: %v", test.in.IP, rv, test.valid)
}
if rv := addrmgr.IsRoutable(&test.in); rv != test.routable {
t.Errorf("IsRoutable %s\n got: %v want: %v", test.in.IP, rv, test.routable)
}
}
}
// TestGroupKey tests the GroupKey function to ensure it properly groups various
// IP addresses.
func TestGroupKey(t *testing.T) {
tests := []struct {
name string
ip string
expected string
}{
// Local addresses.
{name: "ipv4 localhost", ip: "127.0.0.1", expected: "local"},
{name: "ipv6 localhost", ip: "::1", expected: "local"},
{name: "ipv4 zero", ip: "0.0.0.0", expected: "local"},
{name: "ipv4 first octet zero", ip: "0.1.2.3", expected: "local"},
// Unroutable addresses.
{name: "ipv4 invalid bcast", ip: "255.255.255.255", expected: "unroutable"},
{name: "ipv4 rfc1918 10/8", ip: "10.1.2.3", expected: "unroutable"},
{name: "ipv4 rfc1918 172.16/12", ip: "172.16.1.2", expected: "unroutable"},
{name: "ipv4 rfc1918 192.168/16", ip: "192.168.1.2", expected: "unroutable"},
{name: "ipv6 rfc3849 2001:db8::/32", ip: "2001:db8::1234", expected: "unroutable"},
{name: "ipv4 rfc3927 169.254/16", ip: "169.254.1.2", expected: "unroutable"},
{name: "ipv6 rfc4193 fc00::/7", ip: "fc00::1234", expected: "unroutable"},
{name: "ipv6 rfc4843 2001:10::/28", ip: "2001:10::1234", expected: "unroutable"},
{name: "ipv6 rfc4862 fe80::/64", ip: "fe80::1234", expected: "unroutable"},
// IPv4 normal.
{name: "ipv4 normal class a", ip: "12.1.2.3", expected: "12.1.0.0"},
{name: "ipv4 normal class b", ip: "173.1.2.3", expected: "173.1.0.0"},
{name: "ipv4 normal class c", ip: "196.1.2.3", expected: "196.1.0.0"},
// IPv6/IPv4 translations.
{name: "ipv6 rfc3964 with ipv4 encap", ip: "2002:0c01:0203::", expected: "12.1.0.0"},
{name: "ipv6 rfc4380 toredo ipv4", ip: "2001:0:1234::f3fe:fdfc", expected: "12.1.0.0"},
{name: "ipv6 rfc6052 well-known prefix with ipv4", ip: "64:ff9b::0c01:0203", expected: "12.1.0.0"},
{name: "ipv6 rfc6145 translated ipv4", ip: "::ffff:0:0c01:0203", expected: "12.1.0.0"},
// Tor.
{name: "ipv6 tor onioncat", ip: "fd87:d87e:eb43:1234::5678", expected: "tor:2"},
{name: "ipv6 tor onioncat 2", ip: "fd87:d87e:eb43:1245::6789", expected: "tor:2"},
{name: "ipv6 tor onioncat 3", ip: "fd87:d87e:eb43:1345::6789", expected: "tor:3"},
// IPv6 normal.
{name: "ipv6 normal", ip: "2602:100::1", expected: "2602:100::"},
{name: "ipv6 normal 2", ip: "2602:0100::1234", expected: "2602:100::"},
{name: "ipv6 hurricane electric", ip: "2001:470:1f10:a1::2", expected: "2001:470:1000::"},
{name: "ipv6 hurricane electric 2", ip: "2001:0470:1f10:a1::2", expected: "2001:470:1000::"},
}
for i, test := range tests {
nip := net.ParseIP(test.ip)
na := *wire.NewNetAddressIPPort(nip, 21102, wire.SFNodeNetwork)
if key := addrmgr.GroupKey(&na); key != test.expected {
t.Errorf("TestGroupKey #%d (%s): unexpected group key "+
"- got '%s', want '%s'", i, test.name,
key, test.expected)
}
}
} | |
main.rs | #![recursion_limit="128"]
#[macro_use]
extern crate log;
extern crate web_logger;
extern crate strum;
#[macro_use]
extern crate strum_macros;
#[macro_use]
extern crate yew;
extern crate counter;
extern crate crm;
extern crate custom_components;
extern crate dashboard;
extern crate fragments;
extern crate game_of_life;
extern crate inner_html;
extern crate large_table;
extern crate mount_point;
extern crate npm_and_rest;
extern crate textarea;
extern crate timer;
extern crate todomvc;
extern crate two_apps;
use strum::IntoEnumIterator;
use std::str::FromStr;
use yew::prelude::*;
use counter::Model as Counter;
use crm::Model as Crm;
use custom_components::Model as CustomComponents;
use dashboard::Model as Dashboard;
use fragments::Model as Fragments;
use game_of_life::Model as GameOfLife;
use inner_html::Model as InnerHtml;
use large_table::Model as LargeTable;
use mount_point::Model as MountPoint;
use npm_and_rest::Model as NpmAndRest;
use textarea::Model as Textarea;
use timer::Model as Timer;
use todomvc::Model as Todomvc;
use two_apps::Model as TwoApps;
#[derive(Debug, Display, EnumString, EnumIter)]
enum Scene {
NotSelected,
Counter,
Crm,
CustomComponents,
Dashboard,
Fragments,
GameOfLife,
InnerHtml,
LargeTable,
MountPoint,
NpmAndRest,
Textarea,
Timer,
Todomvc,
TwoApps,
}
enum Msg {
SwitchTo(Scene),
}
impl Component for Scene {
type Message = Msg;
type Properties = ();
fn create(_: Self::Properties, _: ComponentLink<Self>) -> Self {
Scene::NotSelected
}
fn update(&mut self, msg: Self::Message) -> ShouldRender {
match msg {
Msg::SwitchTo(scene) => {
*self = scene;
true
}
}
}
}
impl Renderable<Scene> for Scene {
fn view(&self) -> Html<Self> {
let _options = Scene::iter().map(|scene| {
html! {
<option value={ scene.to_string() }, > { scene.to_string() } </option>
}
});
html! {
<div id="fullscreen",>
<div id="left_pane",>
<h2>{ "Yew showcase" }</h2>
<select size="20", value={Scene::NotSelected.to_string()},
onchange=|cd| {
let scene = match cd {
ChangeData::Select(se) => se.value().unwrap(),
_ => unreachable!()
};
match Scene::from_str(&scene) {
Ok(scene) => Msg::SwitchTo(scene),
_ => unreachable!(),
}
}
, >
{ for _options }
</select>
</div>
<div id="right_pane",>
<h2>{ self.to_string() }</h2>
{ self.view_scene() }
</div>
</div>
}
}
}
impl Scene {
fn | (&self) -> Html<Self> {
match *self {
Scene::NotSelected => {
html! {
<p>{ "Select the scene, please." }</p>
}
}
Scene::Counter => {
html! {
<Counter: />
}
}
Scene::Crm => {
html! {
<Crm: />
}
}
Scene::CustomComponents => {
html! {
<CustomComponents: />
}
}
Scene::Dashboard => {
html! {
<Dashboard: />
}
}
Scene::Fragments => {
html! {
<Fragments: />
}
}
Scene::GameOfLife => {
html! {
<GameOfLife: />
}
}
Scene::InnerHtml => {
html! {
<InnerHtml: />
}
}
Scene::LargeTable => {
html! {
<LargeTable: />
}
}
Scene::MountPoint => {
html! {
<MountPoint: />
}
}
Scene::NpmAndRest => {
html! {
<NpmAndRest: />
}
}
Scene::Textarea => {
html! {
<Textarea: />
}
}
Scene::Timer => {
html! {
<Timer: />
}
}
Scene::Todomvc => {
html! {
<Todomvc: />
}
}
Scene::TwoApps => {
html! {
<TwoApps: />
}
}
}
}
}
fn main() {
web_logger::init();
trace!("Initializing yew...");
yew::initialize();
trace!("Creating an application instance...");
let app: App<Scene> = App::new();
trace!("Mount the App to the body of the page...");
app.mount_to_body();
trace!("Run");
yew::run_loop();
}
| view_scene |
errors6.rs | // errors6.rs
// Using catch-all error types like `Box<dyn error::Error>` isn't recommended
// for library code, where callers might want to make decisions based on the
// error content, instead of printing it out or propagating it further. Here,
// we define a custom error type to make it possible for callers to decide
// what to do next when our function returns an error.
// Make these tests pass! Execute `rustlings hint errors6` for hints :)
use std::num::ParseIntError;
// This is a custom error type that we will be using in `parse_pos_nonzero()`.
#[derive(PartialEq, Debug)]
enum ParsePosNonzeroError {
Creation(CreationError),
ParseInt(ParseIntError)
}
impl ParsePosNonzeroError {
fn from_creation(err: CreationError) -> ParsePosNonzeroError {
ParsePosNonzeroError::Creation(err)
}
// TODO: add another error conversion function here.
fn from_parseint(err: ParseIntError) -> ParsePosNonzeroError {
ParsePosNonzeroError::ParseInt(err)
}
}
fn parse_pos_nonzero(s: &str)
-> Result<PositiveNonzeroInteger, ParsePosNonzeroError>
{
// TODO: change this to return an appropriate error instead of panicking
// when `parse()` returns an error.
let x: i64 = s.parse().map_err(ParsePosNonzeroError::from_parseint)?;
PositiveNonzeroInteger::new(x)
.map_err(ParsePosNonzeroError::from_creation)
}
// Don't change anything below this line.
#[derive(PartialEq, Debug)]
struct PositiveNonzeroInteger(u64);
#[derive(PartialEq, Debug)]
enum CreationError {
Negative,
Zero,
}
impl PositiveNonzeroInteger {
fn new(value: i64) -> Result<PositiveNonzeroInteger, CreationError> {
match value {
x if x < 0 => Err(CreationError::Negative),
x if x == 0 => Err(CreationError::Zero),
x => Ok(PositiveNonzeroInteger(x as u64))
}
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_parse_error() |
#[test]
fn test_negative() {
assert_eq!(
parse_pos_nonzero("-555"),
Err(ParsePosNonzeroError::Creation(CreationError::Negative))
);
}
#[test]
fn test_zero() {
assert_eq!(
parse_pos_nonzero("0"),
Err(ParsePosNonzeroError::Creation(CreationError::Zero))
);
}
#[test]
fn test_positive() {
let x = PositiveNonzeroInteger::new(42);
assert!(x.is_ok());
assert_eq!(parse_pos_nonzero("42"), Ok(x.unwrap()));
}
}
| {
// We can't construct a ParseIntError, so we have to pattern match.
assert!(matches!(
parse_pos_nonzero("not a number"),
Err(ParsePosNonzeroError::ParseInt(_))
));
} |
conftest.py | # Copyright 2019-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from __future__ import absolute_import
import boto3
import os
import logging
import platform
import pytest
import shutil
import sys
import tempfile
from sagemaker import LocalSession, Session
from sagemaker.pytorch import PyTorch
from .utils import image_utils
logger = logging.getLogger(__name__)
logging.getLogger('boto').setLevel(logging.INFO)
logging.getLogger('boto3').setLevel(logging.INFO)
logging.getLogger('botocore').setLevel(logging.INFO)
logging.getLogger('factory.py').setLevel(logging.INFO)
logging.getLogger('auth.py').setLevel(logging.INFO)
logging.getLogger('connectionpool.py').setLevel(logging.INFO)
dir_path = os.path.dirname(os.path.realpath(__file__))
NO_P2_REGIONS = ['ap-east-1', 'ap-northeast-3', 'ap-southeast-2', 'ca-central-1', 'eu-central-1', 'eu-north-1',
'eu-west-2', 'eu-west-3', 'us-west-1', 'sa-east-1', 'me-south-1']
NO_P3_REGIONS = ['ap-east-1', 'ap-northeast-3', 'ap-southeast-1', 'ap-southeast-2', 'ap-south-1', 'ca-central-1',
'eu-central-1', 'eu-north-1', 'eu-west-2', 'eu-west-3', 'sa-east-1', 'us-west-1', 'me-south-1']
def pytest_addoption(parser):
parser.addoption('--build-image', '-D', action='store_true')
parser.addoption('--build-base-image', '-B', action='store_true')
parser.addoption('--aws-id')
parser.addoption('--instance-type')
parser.addoption('--accelerator-type', default=None)
parser.addoption('--docker-base-name', default='pytorch')
parser.addoption('--region', default='us-west-2')
parser.addoption('--framework-version', default=PyTorch.LATEST_VERSION)
parser.addoption('--py-version', choices=['2', '3'], default=str(sys.version_info.major))
# Processor is still "cpu" for EIA tests
parser.addoption('--processor', choices=['gpu', 'cpu', 'eia'], default='cpu')
# If not specified, will default to {framework-version}-{processor}-py{py-version}
parser.addoption('--tag', default=None)
parser.addoption('--generate-coverage-doc', default=False, action='store_true',
help='use this option to generate test coverage doc')
def pytest_collection_modifyitems(session, config, items):
if config.getoption("--generate-coverage-doc"):
from test.test_utils.test_reporting import TestReportGenerator
report_generator = TestReportGenerator(items, is_sagemaker=True)
report_generator.generate_coverage_doc(framework="pytorch", job_type="inference")
@pytest.fixture(scope='session', name='docker_base_name')
def fixture_docker_base_name(request):
|
@pytest.fixture(scope='session', name='region')
def fixture_region(request):
return request.config.getoption('--region')
@pytest.fixture(scope='session', name='framework_version')
def fixture_framework_version(request):
return request.config.getoption('--framework-version')
@pytest.fixture(scope='session', name='py_version')
def fixture_py_version(request):
return 'py{}'.format(int(request.config.getoption('--py-version')))
@pytest.fixture(scope='session', name='processor')
def fixture_processor(request):
return request.config.getoption('--processor')
@pytest.fixture(scope='session', name='tag')
def fixture_tag(request, framework_version, processor, py_version):
provided_tag = request.config.getoption('--tag')
default_tag = '{}-{}-{}'.format(framework_version, processor, py_version)
return provided_tag if provided_tag else default_tag
@pytest.fixture(scope='session', name='docker_image')
def fixture_docker_image(docker_base_name, tag):
return '{}:{}'.format(docker_base_name, tag)
@pytest.fixture
def opt_ml():
tmp = tempfile.mkdtemp()
os.mkdir(os.path.join(tmp, 'output'))
# Docker cannot mount Mac OS /var folder properly see
# https://forums.docker.com/t/var-folders-isnt-mounted-properly/9600
opt_ml_dir = '/private{}'.format(tmp) if platform.system() == 'Darwin' else tmp
yield opt_ml_dir
shutil.rmtree(tmp, True)
@pytest.fixture(scope='session', name='use_gpu')
def fixture_use_gpu(processor):
return processor == 'gpu'
@pytest.fixture(scope='session', name='build_base_image', autouse=True)
def fixture_build_base_image(request, framework_version, py_version, processor, tag, docker_base_name):
build_base_image = request.config.getoption('--build-base-image')
if build_base_image:
return image_utils.build_base_image(framework_name=docker_base_name,
framework_version=framework_version,
py_version=py_version,
base_image_tag=tag,
processor=processor,
cwd=os.path.join(dir_path, '..'))
return tag
@pytest.fixture(scope='session', name='sagemaker_session')
def fixture_sagemaker_session(region):
return Session(boto_session=boto3.Session(region_name=region))
@pytest.fixture(scope='session', name='sagemaker_local_session')
def fixture_sagemaker_local_session(region):
return LocalSession(boto_session=boto3.Session(region_name=region))
@pytest.fixture(name='aws_id', scope='session')
def fixture_aws_id(request):
return request.config.getoption('--aws-id')
@pytest.fixture(name='instance_type', scope='session')
def fixture_instance_type(request, processor):
provided_instance_type = request.config.getoption('--instance-type')
default_instance_type = 'local' if processor == 'cpu' else 'local_gpu'
return provided_instance_type or default_instance_type
@pytest.fixture(name='accelerator_type', scope='session')
def fixture_accelerator_type(request):
return request.config.getoption('--accelerator-type')
@pytest.fixture(name='docker_registry', scope='session')
def fixture_docker_registry(aws_id, region):
return '{}.dkr.ecr.{}.amazonaws.com'.format(aws_id, region)
@pytest.fixture(name='ecr_image', scope='session')
def fixture_ecr_image(docker_registry, docker_base_name, tag):
return '{}/{}:{}'.format(docker_registry, docker_base_name, tag)
@pytest.fixture(autouse=True)
def skip_by_device_type(request, use_gpu, instance_type, accelerator_type):
is_gpu = use_gpu or instance_type[3] in ['g', 'p']
is_eia = accelerator_type is not None
# Separate out cases for clearer logic.
# When running GPU test, skip CPU test. When running CPU test, skip GPU test.
if (request.node.get_closest_marker('gpu_test') and not is_gpu) or \
(request.node.get_closest_marker('cpu_test') and is_gpu):
pytest.skip('Skipping because running on \'{}\' instance'.format(instance_type))
# When running EIA test, skip the CPU and GPU functions
elif (request.node.get_closest_marker('gpu_test') or request.node.get_closest_marker('cpu_test')) and is_eia:
pytest.skip('Skipping because running on \'{}\' instance'.format(instance_type))
# When running CPU or GPU test, skip EIA test.
elif request.node.get_closest_marker('eia_test') and not is_eia:
pytest.skip('Skipping because running on \'{}\' instance'.format(instance_type))
@pytest.fixture(autouse=True)
def skip_by_py_version(request, py_version):
if request.node.get_closest_marker('skip_py2') and py_version != 'py3':
pytest.skip('Skipping the test because Python 2 is not supported.')
@pytest.fixture(autouse=True)
def skip_gpu_instance_restricted_regions(region, instance_type):
if (region in NO_P2_REGIONS and instance_type.startswith('ml.p2')) \
or (region in NO_P3_REGIONS and instance_type.startswith('ml.p3')):
pytest.skip('Skipping GPU test in region {}'.format(region))
@pytest.fixture(autouse=True)
def skip_gpu_py2(request, use_gpu, instance_type, py_version, framework_version):
is_gpu = use_gpu or instance_type[3] in ['g', 'p']
if request.node.get_closest_marker('skip_gpu_py2') and is_gpu and py_version != 'py3' \
and framework_version == '1.4.0':
pytest.skip('Skipping the test until mms issue resolved.')
| return request.config.getoption('--docker-base-name') |
__init__.py | from .task_wrappers import task, create_task, run_generation_task
| from .gpu_map import bind_devices as _bind_devices, get_device
_bind_devices() |
|
test_file_svc.py | import os
import pytest
import yaml
from tests import AsyncMock
from asyncio import Future
from app.utility.file_decryptor import decrypt
@pytest.mark.usefixtures(
'init_base_world'
)
class TestFileService:
def test_save_file(self, loop, file_svc, tmp_path):
filename = "test_file.txt"
payload = b'These are the file contents.'
# Save temporary test file
loop.run_until_complete(file_svc.save_file(filename, payload, tmp_path, encrypt=False))
file_location = tmp_path / filename
# Read file contents from saved file
file_contents = open(file_location, "r")
assert os.path.isfile(file_location)
assert payload.decode("utf-8") == file_contents.read()
def test_create_exfil_sub_directory(self, loop, file_svc):
exfil_dir_name = 'unit-testing-Rocks'
new_dir = loop.run_until_complete(file_svc.create_exfil_sub_directory(exfil_dir_name))
assert os.path.isdir(new_dir)
os.rmdir(new_dir)
def test_read_write_result_file(self, tmpdir, file_svc):
link_id = '12345'
output = 'output testing unit' | output_data = file_svc.read_result_file(link_id=link_id, location=tmpdir)
assert output_data == output
def test_pack_file(self, loop, mocker, tmpdir, file_svc, data_svc):
payload = 'unittestpayload'
payload_content = b'content'
new_payload_content = b'new_content'
packer_name = 'test'
# create temp files
file = tmpdir.join(payload)
file.write(payload_content)
# start mocking up methods
packer = mocker.Mock(return_value=Future())
packer.return_value = packer
packer.pack = AsyncMock(return_value=(payload, new_payload_content))
data_svc.locate = AsyncMock(return_value=[])
module = mocker.Mock()
module.Packer = packer
file_svc.packers[packer_name] = module
file_svc.data_svc = data_svc
file_svc.read_file = AsyncMock(return_value=(payload, payload_content))
file_path, content, display_name = loop.run_until_complete(file_svc.get_file(headers=dict(file='%s:%s' % (packer_name, payload))))
packer.pack.assert_called_once()
assert payload == file_path
assert content == new_payload_content
def test_upload_file(self, loop, file_svc):
upload_dir = loop.run_until_complete(file_svc.create_exfil_sub_directory('test-upload'))
upload_filename = 'uploadedfile.txt'
upload_content = b'this is a test upload file'
loop.run_until_complete(file_svc.save_file(upload_filename, upload_content, upload_dir, encrypt=False))
uploaded_file_path = os.path.join(upload_dir, upload_filename)
assert os.path.isfile(uploaded_file_path)
with open(uploaded_file_path, 'rb') as file:
written_data = file.read()
assert written_data == upload_content
os.remove(uploaded_file_path)
os.rmdir(upload_dir)
def test_encrypt_upload(self, loop, file_svc):
upload_dir = loop.run_until_complete(file_svc.create_exfil_sub_directory('test-encrypted-upload'))
upload_filename = 'encryptedupload.txt'
upload_content = b'this is a test upload file'
loop.run_until_complete(file_svc.save_file(upload_filename, upload_content, upload_dir))
uploaded_file_path = os.path.join(upload_dir, upload_filename)
decrypted_file_path = upload_filename + '_decrypted'
config_to_use = 'conf/default.yml'
with open(config_to_use, encoding='utf-8') as conf:
config = list(yaml.load_all(conf, Loader=yaml.FullLoader))[0]
decrypt(uploaded_file_path, config, output_file=decrypted_file_path)
assert os.path.isfile(decrypted_file_path)
with open(decrypted_file_path, 'rb') as decrypted_file:
decrypted_data = decrypted_file.read()
assert decrypted_data == upload_content
os.remove(uploaded_file_path)
os.remove(decrypted_file_path)
os.rmdir(upload_dir) | # write output data
file_svc.write_result_file(link_id=link_id, output=output, location=tmpdir)
# read output data |
upm.go | package cloud
import (
"context"
"fmt"
"os"
"sort"
"strings"
"text/tabwriter"
semver "github.com/appscode/go-version"
api "github.com/pharmer/pharmer/apis/v1alpha1"
"github.com/pkg/errors"
"golang.org/x/crypto/ssh"
core "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/client-go/kubernetes"
)
type GenericUpgradeManager struct {
ctx context.Context
ssh SSHGetter
kc kubernetes.Interface
cluster *api.Cluster
}
var _ UpgradeManager = &GenericUpgradeManager{}
func NewUpgradeManager(ctx context.Context, ssh SSHGetter, kc kubernetes.Interface, cluster *api.Cluster) UpgradeManager {
return &GenericUpgradeManager{ctx: ctx, ssh: ssh, kc: kc, cluster: cluster}
}
func (upm *GenericUpgradeManager) GetAvailableUpgrades() ([]*api.Upgrade, error) {
// Collect the upgrades kubeadm can do in this list
upgrades := make([]*api.Upgrade, 0)
v := NewKubeVersionGetter(upm.kc, upm.cluster)
clusterVersionStr, clusterVersion, err := v.ClusterVersion()
if err != nil {
return nil, err
}
// Get current kubeadm CLI version
kubeadmVersionStr, kubeadmVersion, err := v.KubeadmVersion()
if err != nil {
return nil, err
}
// Get and output the current latest stable version
stableVersionStr, stableVersion, err := v.VersionFromCILabel("stable", "stable version")
if err != nil {
return nil, err
}
// Get the kubelet versions in the cluster
kubeletVersions, err := v.KubeletVersions()
if err != nil {
return nil, err
}
kubeDNSVersion, err := v.KubeDNSVersion()
if err != nil {
return nil, err
}
// Construct a descriptor for the current state of the world
beforeState := api.ClusterState{
KubeVersion: clusterVersionStr,
DNSVersion: kubeDNSVersion,
KubeadmVersion: kubeadmVersionStr,
KubeletVersions: kubeletVersions,
}
canDoMinorUpgrade := clusterVersion.LessThan(stableVersion)
// A patch version doesn't exist if the cluster version is higher than or equal to the current stable version
// in the case that a user is trying to upgrade from, let's say, v1.8.0-beta.2 to v1.8.0-rc.1 (given we support such upgrades experimentally)
// a stable-1.8 branch doesn't exist yet. Hence this check.
if patchVersionBranchExists(clusterVersion, stableVersion) {
currentBranch := getBranchFromVersion(clusterVersionStr)
versionLabel := fmt.Sprintf("stable-%s", currentBranch)
description := fmt.Sprintf("version in the v%s series", currentBranch)
// Get and output the latest patch version for the cluster branch
patchVersionStr, patchVersion, err := v.VersionFromCILabel(versionLabel, description)
if err != nil {
return nil, err
}
// Check if a minor version upgrade is possible when a patch release exists
// It's only possible if the latest patch version is higher than the current patch version
// If that's the case, they must be on different branches => a newer minor version can be upgraded to
canDoMinorUpgrade = minorUpgradePossibleWithPatchRelease(stableVersion, patchVersion)
// If the cluster version is lower than the newest patch version, we should inform about the possible upgrade
if patchUpgradePossible(clusterVersion, patchVersion) {
// The kubeadm version has to be upgraded to the latest patch version
newKubeadmVer := patchVersionStr
if kubeadmVersion.AtLeast(patchVersion) {
// In this case, the kubeadm CLI version is new enough. Don't display an update suggestion for kubeadm by making .NewKubeadmVersion equal .CurrentKubeadmVersion
newKubeadmVer = kubeadmVersionStr
}
upgrades = append(upgrades, &api.Upgrade{
Description: description,
Before: beforeState,
After: api.ClusterState{
KubeVersion: patchVersionStr,
DNSVersion: kubeDNSVersion,
KubeadmVersion: newKubeadmVer,
// KubeletVersions is unset here as it is not used anywhere in .After
},
})
}
}
if canDoMinorUpgrade {
upgrades = append(upgrades, &api.Upgrade{
Description: "stable version",
Before: beforeState,
After: api.ClusterState{
KubeVersion: stableVersionStr,
DNSVersion: kubeDNSVersion,
KubeadmVersion: stableVersionStr,
// KubeletVersions is unset here as it is not used anywhere in .After
},
})
}
return upgrades, nil
}
func (upm *GenericUpgradeManager) ExecuteSSHCommand(command string, node *core.Node) (string, error) {
cfg, err := upm.ssh.GetSSHConfig(upm.cluster, node)
if err != nil {
return "", err
}
keySigner, err := ssh.ParsePrivateKey(cfg.PrivateKey)
if err != nil {
return "", err
}
config := &ssh.ClientConfig{
User: cfg.User,
Auth: []ssh.AuthMethod{
ssh.PublicKeys(keySigner),
},
HostKeyCallback: ssh.InsecureIgnoreHostKey(),
}
return ExecuteTCPCommand(command, fmt.Sprintf("%v:%v", cfg.HostIP, cfg.HostPort), config)
}
// printAvailableUpgrades prints a UX-friendly overview of what versions are available to upgrade to
// TODO look into columnize or some other formatter when time permits instead of using the tabwriter
func (upm *GenericUpgradeManager) PrintAvailableUpgrades(upgrades []*api.Upgrade) {
// Return quickly if no upgrades can be made
if len(upgrades) == 0 {
fmt.Println("Awesome, you're up-to-date! Enjoy!")
return
}
w := os.Stdout
// The tab writer writes to the "real" writer w
tabw := tabwriter.NewWriter(w, 10, 4, 3, ' ', 0)
// Loop through the upgrade possibilities and output text to the command line
for _, upgrade := range upgrades {
if upgrade.CanUpgradeKubelets() {
fmt.Fprintln(w, "Components that will be upgraded after you've upgraded the control plane:")
fmt.Fprintln(tabw, "COMPONENT\tCURRENT\tAVAILABLE")
firstPrinted := false
// The map is of the form <old-version>:<node-count>. Here all the keys are put into a slice and sorted
// in order to always get the right order. Then the map value is extracted separately
for _, oldVersion := range sortedSliceFromStringIntMap(upgrade.Before.KubeletVersions) {
nodeCount := upgrade.Before.KubeletVersions[oldVersion]
if !firstPrinted {
// Output the Kubelet header only on the first version pair
fmt.Fprintf(tabw, "Kubelet\t%d x %s\t%s\n", nodeCount, oldVersion, upgrade.After.KubeVersion)
firstPrinted = true
continue
}
fmt.Fprintf(tabw, "\t\t%d x %s\t%s\n", nodeCount, oldVersion, upgrade.After.KubeVersion)
}
// We should flush the writer here at this stage; as the columns will now be of the right size, adjusted to the above content
tabw.Flush()
fmt.Fprintln(w, "")
}
fmt.Fprintf(w, "Upgrade to the latest %s:\n", upgrade.Description)
fmt.Fprintln(w, "")
fmt.Fprintln(tabw, "COMPONENT\tCURRENT\tAVAILABLE")
fmt.Fprintf(tabw, "API Server\t%s\t%s\n", upgrade.Before.KubeVersion, upgrade.After.KubeVersion)
fmt.Fprintf(tabw, "Controller Manager\t%s\t%s\n", upgrade.Before.KubeVersion, upgrade.After.KubeVersion)
fmt.Fprintf(tabw, "Scheduler\t%s\t%s\n", upgrade.Before.KubeVersion, upgrade.After.KubeVersion)
fmt.Fprintf(tabw, "Kube Proxy\t%s\t%s\n", upgrade.Before.KubeVersion, upgrade.After.KubeVersion)
fmt.Fprintf(tabw, "Kube DNS\t%s\t%s\n", upgrade.Before.DNSVersion, upgrade.After.DNSVersion)
// The tabwriter should be flushed at this stage as we have now put in all the required content for this time. This is required for the tabs' size to be correct.
tabw.Flush()
fmt.Fprintln(w, "")
fmt.Fprintln(w, "You can now apply the upgrade by executing the following command:")
fmt.Fprintln(w, "")
fmt.Fprintf(w, "\tpharmer edit cluster %s --kubernetes-version=%s\n", upm.cluster.Name, upgrade.After.KubeVersion)
fmt.Fprintln(w, "")
if upgrade.Before.KubeadmVersion != upgrade.After.KubeadmVersion {
fmt.Fprintf(w, "Note: Before you do can perform this upgrade, you have to update kubeadm to %s\n", upgrade.After.KubeadmVersion)
fmt.Fprintln(w, "")
}
fmt.Fprintln(w, "_____________________________________________________________________")
fmt.Fprintln(w, "")
}
}
func (upm *GenericUpgradeManager) Apply(dryRun bool) (acts []api.Action, err error) {
acts = append(acts, api.Action{
Action: api.ActionUpdate,
Resource: "Master upgrade",
Message: fmt.Sprintf("Master instance will be upgraded to %v", upm.cluster.Spec.KubernetesVersion),
})
if !dryRun {
if err = upm.MasterUpgrade(); err != nil {
return
}
// wait for nodes to start
if err = WaitForReadyMaster(upm.ctx, upm.kc); err != nil {
return
}
}
var nodeGroups []*api.NodeGroup
if nodeGroups, err = Store(upm.ctx).NodeGroups(upm.cluster.Name).List(metav1.ListOptions{}); err != nil {
return
}
acts = append(acts, api.Action{
Action: api.ActionUpdate,
Resource: "Node group upgrade",
Message: fmt.Sprintf("Node group will be upgraded to %v", upm.cluster.Spec.KubernetesVersion),
})
if !dryRun {
for _, ng := range nodeGroups {
if ng.IsMaster() {
continue
}
if err = upm.NodeGroupUpgrade(ng); err != nil {
return
}
}
}
return
}
func (upm *GenericUpgradeManager) MasterUpgrade() error {
var masterInstance *core.Node
var err error
masterInstances, err := upm.kc.CoreV1().Nodes().List(metav1.ListOptions{
LabelSelector: labels.SelectorFromSet(map[string]string{
api.RoleMasterKey: "",
}).String(),
})
if err != nil {
return err
}
if len(masterInstances.Items) == 1 {
masterInstance = &masterInstances.Items[0]
} else if len(masterInstances.Items) > 1 {
return errors.Errorf("multiple master found")
} else {
return errors.Errorf("no master found")
}
desiredVersion, _ := semver.NewVersion(upm.cluster.Spec.KubernetesVersion)
currentVersion, _ := semver.NewVersion(masterInstance.Status.NodeInfo.KubeletVersion)
v11, err := semver.NewVersion("1.11.0")
if err != nil {
return err
}
// ref: https://stackoverflow.com/a/2831449/244009
steps := []string{
`echo "#!/bin/bash" > /usr/bin/pharmer.sh`,
`echo "set -xeou pipefail" >> /usr/bin/pharmer.sh`,
`echo "export DEBIAN_FRONTEND=noninteractive" >> /usr/bin/pharmer.sh`,
`echo "export DEBCONF_NONINTERACTIVE_SEEN=true" >> /usr/bin/pharmer.sh`,
`echo "" >> /usr/bin/pharmer.sh`,
`echo "apt-get update" >> /usr/bin/pharmer.sh`,
}
if !desiredVersion.Equal(currentVersion) {
patch := desiredVersion.Clone().ToMutator().ResetPrerelease().ResetMetadata().String()
minor := desiredVersion.Clone().ToMutator().ResetPrerelease().ResetMetadata().ResetPatch().String()
cni, found := kubernetesCNIVersions[minor]
if !found {
return errors.Errorf("kubernetes-cni version is unknown for Kubernetes version %s", desiredVersion)
}
prekVer, found := prekVersions[minor]
if !found {
return errors.Errorf("pre-k version is unknown for Kubernetes version %s", desiredVersion)
}
// Keep using forked kubeadm 1.8.x for: https://github.com/kubernetes/kubernetes/pull/49840
if minor == "1.8.0" {
steps = append(steps, fmt.Sprintf(`echo "apt-get upgrade -y kubelet=%s* kubectl=%s* kubernetes-cni=%s*" >> /usr/bin/pharmer.sh`, patch, patch, cni))
} else if desiredVersion.LessThan(v11) {
steps = append(steps, fmt.Sprintf(`echo "apt-get upgrade -y kubelet=%s* kubectl=%s* kubeadm=%s* kubernetes-cni=%s*" >> /usr/bin/pharmer.sh`, patch, patch, patch, cni))
} else {
steps = append(steps, []string{
`echo "curl -sSL https://dl.k8s.io/release/$(curl -sSL https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubeadm > /usr/bin/kubeadm" >> /usr/bin/pharmer.sh`,
`echo "chmod a+rx /usr/bin/kubeadm" >> /usr/bin/pharmer.sh`,
}...)
}
steps = append(steps, fmt.Sprintf(`echo "curl -fsSL --retry 5 -o pre-k https://cdn.appscode.com/binaries/pre-k/%s/pre-k-linux-amd64 && chmod +x pre-k && mv pre-k /usr/bin/" >> /usr/bin/pharmer.sh`, prekVer))
}
steps = append(steps,
fmt.Sprintf(`echo "pre-k check master-status --timeout=-1s --kubeconfig=/etc/kubernetes/admin.conf" >> /usr/bin/pharmer.sh`))
steps = append(steps,
fmt.Sprintf(`echo "kubeadm upgrade apply %v -y" >> /usr/bin/pharmer.sh`, upm.cluster.Spec.KubernetesVersion))
if desiredVersion.Compare(v11) >= 0 {
steps = append(steps,
fmt.Sprintf(`echo "kubectl drain %s --ignore-daemonsets" >> /usr/bin/pharmer.sh`, masterInstance.Name),
fmt.Sprintf(`echo "apt-get upgrade -y kubelet kubeadm" >> /usr/bin/pharmer.sh`),
fmt.Sprintf(`echo "kubectl uncordon %s" >> /usr/bin/pharmer.sh`, masterInstance.Name))
}
steps = append(steps,
`chmod +x /usr/bin/pharmer.sh`,
`nohup /usr/bin/pharmer.sh >> /var/log/pharmer.log 2>&1 &`,
)
cmd := fmt.Sprintf("sh -c '%s'", strings.Join(steps, "; "))
Logger(upm.ctx).Infof("Upgrading server %s using `%s`", masterInstance.Name, cmd)
if _, err = upm.ExecuteSSHCommand(cmd, masterInstance); err != nil {
return err
}
return nil
}
func (upm *GenericUpgradeManager) NodeGroupUpgrade(ng *api.NodeGroup) (err error) {
nodes := &core.NodeList{}
if upm.kc != nil {
nodes, err = upm.kc.CoreV1().Nodes().List(metav1.ListOptions{
LabelSelector: labels.SelectorFromSet(map[string]string{
api.NodePoolKey: ng.Name,
}).String(),
})
if err != nil {
return
}
}
desiredVersion, _ := semver.NewVersion(upm.cluster.Spec.KubernetesVersion)
v11, err := semver.NewVersion("1.11.0")
if err != nil {
return err
}
for _, node := range nodes.Items {
currentVersion, _ := semver.NewVersion(node.Status.NodeInfo.KubeletVersion)
if !desiredVersion.Equal(currentVersion) {
patch := desiredVersion.Clone().ToMutator().ResetPrerelease().ResetMetadata().String()
minor := desiredVersion.Clone().ToMutator().ResetPrerelease().ResetMetadata().ResetPatch().String()
cni, found := kubernetesCNIVersions[minor]
if !found {
return errors.Errorf("kubernetes-cni version is unknown for Kubernetes version %s", desiredVersion)
}
prekVer, found := prekVersions[minor]
if !found {
return errors.Errorf("pre-k version is unknown for Kubernetes version %s", desiredVersion)
}
// ref: https://stackoverflow.com/a/2831449/244009
steps := []string{
`echo "#!/bin/bash" > /usr/bin/pharmer.sh`,
`echo "set -xeou pipefail" >> /usr/bin/pharmer.sh`,
`echo "export DEBIAN_FRONTEND=noninteractive" >> /usr/bin/pharmer.sh`,
`echo "export DEBCONF_NONINTERACTIVE_SEEN=true" >> /usr/bin/pharmer.sh`,
`echo "" >> /usr/bin/pharmer.sh`,
`echo "apt-get update" >> /usr/bin/pharmer.sh`,
}
// Keep using forked kubeadm 1.8.x for: https://github.com/kubernetes/kubernetes/pull/49840
if minor == "1.8.0" {
steps = append(steps,
fmt.Sprintf(`echo "apt-get upgrade -y kubelet=%s* kubectl=%s* kubernetes-cni=%s*" >> /usr/bin/pharmer.sh`, patch, patch, cni),
)
} else {
steps = append(steps,
fmt.Sprintf(`echo "apt-get upgrade -y kubelet=%s* kubectl=%s* kubeadm=%s* kubernetes-cni=%s*" >> /usr/bin/pharmer.sh`, patch, patch, patch, cni),
)
}
if desiredVersion.Compare(v11) >= 0 {
steps = append(steps,
fmt.Sprintf(`echo "kubeadm upgrade node config --kubelet-version \$(kubelet --version | cut -d '"'"' '"'"' -f 2)" >> /usr/bin/pharmer.sh`))
}
steps = append(steps,
fmt.Sprintf(`echo "curl -fsSL --retry 5 -o pre-k https://cdn.appscode.com/binaries/pre-k/%s/pre-k-linux-amd64 && chmod +x pre-k && mv pre-k /usr/bin/" >> /usr/bin/pharmer.sh`, prekVer),
`echo "systemctl restart kubelet" >> /usr/bin/pharmer.sh`,
`chmod +x /usr/bin/pharmer.sh`,
`nohup /usr/bin/pharmer.sh >> /var/log/pharmer.log 2>&1 &`,
)
cmd := fmt.Sprintf("sh -c '%s'", strings.Join(steps, "; "))
Logger(upm.ctx).Infof("Upgrading server %s using `%s`", node.Name, cmd)
if _, err = upm.ExecuteSSHCommand(cmd, &node); err != nil {
return err
}
}
}
return nil
}
// sortedSliceFromStringIntMap returns a slice of the keys in the map sorted alphabetically
func sortedSliceFromStringIntMap(strMap map[string]uint32) []string | {
strSlice := []string{}
for k := range strMap {
strSlice = append(strSlice, k)
}
sort.Strings(strSlice)
return strSlice
} |
|
choose_utils.rs |
pub fn | (n: u64, k: u64) -> u64 {
let mut res = 1;
for i in 0..k {
res = (res * (n - i)) /
(i + 1);
}
res
} | n_choose_k |
notification.interceptor.js | (function() {
'use strict';
angular
.module('quisListingApp')
.factory('notificationInterceptor', notificationInterceptor);
notificationInterceptor.$inject = ['$q', 'AlertService'];
function notificationInterceptor ($q, AlertService) {
var service = {
response: response
};
return service; | }).sort();
var alertKey = response.headers(headers[0]);
if (angular.isString(alertKey)) {
AlertService.success(alertKey, { param : response.headers(headers[1])});
}
return response;
}
}
})(); |
function response (response) {
var headers = Object.keys(response.headers()).filter(function (header) {
return header.indexOf('app-alert', header.length - 'app-alert'.length) !== -1 || header.indexOf('app-params', header.length - 'app-params'.length) !== -1; |
cs.js | //! moment.js locale configuration
//! locale : czech (cs)
//! author : petrbela : https://github.com/petrbela
;(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined'
&& typeof require === 'function' ? factory(require('../moment')) :
typeof define === 'function' && define.amd ? define(['../moment'], factory) :
factory(global.moment)
}(this, function (moment) { 'use strict';
var months = 'leden_únor_březen_duben_květen_červen_červenec_srpen_září_říjen_listopad_prosinec'.split('_'),
monthsShort = 'led_úno_bře_dub_kvě_čvn_čvc_srp_zář_říj_lis_pro'.split('_');
function plural(n) {
return (n > 1) && (n < 5) && (~~(n / 10) !== 1);
}
function translate(number, w | fix, key, isFuture) {
var result = number + ' ';
switch (key) {
case 's': // a few seconds / in a few seconds / a few seconds ago
return (withoutSuffix || isFuture) ? 'pár sekund' : 'pár sekundami';
case 'm': // a minute / in a minute / a minute ago
return withoutSuffix ? 'minuta' : (isFuture ? 'minutu' : 'minutou');
case 'mm': // 9 minutes / in 9 minutes / 9 minutes ago
if (withoutSuffix || isFuture) {
return result + (plural(number) ? 'minuty' : 'minut');
} else {
return result + 'minutami';
}
break;
case 'h': // an hour / in an hour / an hour ago
return withoutSuffix ? 'hodina' : (isFuture ? 'hodinu' : 'hodinou');
case 'hh': // 9 hours / in 9 hours / 9 hours ago
if (withoutSuffix || isFuture) {
return result + (plural(number) ? 'hodiny' : 'hodin');
} else {
return result + 'hodinami';
}
break;
case 'd': // a day / in a day / a day ago
return (withoutSuffix || isFuture) ? 'den' : 'dnem';
case 'dd': // 9 days / in 9 days / 9 days ago
if (withoutSuffix || isFuture) {
return result + (plural(number) ? 'dny' : 'dní');
} else {
return result + 'dny';
}
break;
case 'M': // a month / in a month / a month ago
return (withoutSuffix || isFuture) ? 'měsíc' : 'měsícem';
case 'MM': // 9 months / in 9 months / 9 months ago
if (withoutSuffix || isFuture) {
return result + (plural(number) ? 'měsíce' : 'měsíců');
} else {
return result + 'měsíci';
}
break;
case 'y': // a year / in a year / a year ago
return (withoutSuffix || isFuture) ? 'rok' : 'rokem';
case 'yy': // 9 years / in 9 years / 9 years ago
if (withoutSuffix || isFuture) {
return result + (plural(number) ? 'roky' : 'let');
} else {
return result + 'lety';
}
break;
}
}
var cs = moment.defineLocale('cs', {
months : months,
monthsShort : monthsShort,
monthsParse : (function (months, monthsShort) {
var i, _monthsParse = [];
for (i = 0; i < 12; i++) {
// use custom parser to solve problem with July (červenec)
_monthsParse[i] = new RegExp('^' + months[i] + '$|^' + monthsShort[i] + '$', 'i');
}
return _monthsParse;
}(months, monthsShort)),
shortMonthsParse : (function (monthsShort) {
var i, _shortMonthsParse = [];
for (i = 0; i < 12; i++) {
_shortMonthsParse[i] = new RegExp('^' + monthsShort[i] + '$', 'i');
}
return _shortMonthsParse;
}(monthsShort)),
longMonthsParse : (function (months) {
var i, _longMonthsParse = [];
for (i = 0; i < 12; i++) {
_longMonthsParse[i] = new RegExp('^' + months[i] + '$', 'i');
}
return _longMonthsParse;
}(months)),
weekdays : 'neděle_pondělí_úterý_středa_čtvrtek_pátek_sobota'.split('_'),
weekdaysShort : 'ne_po_út_st_čt_pá_so'.split('_'),
weekdaysMin : 'ne_po_út_st_čt_pá_so'.split('_'),
longDateFormat : {
LT: 'H:mm',
LTS : 'H:mm:ss',
L : 'DD.MM.YYYY',
LL : 'D. MMMM YYYY',
LLL : 'D. MMMM YYYY H:mm',
LLLL : 'dddd D. MMMM YYYY H:mm'
},
calendar : {
sameDay: '[dnes v] LT',
nextDay: '[zítra v] LT',
nextWeek: function () {
switch (this.day()) {
case 0:
return '[v neděli v] LT';
case 1:
case 2:
return '[v] dddd [v] LT';
case 3:
return '[ve středu v] LT';
case 4:
return '[ve čtvrtek v] LT';
case 5:
return '[v pátek v] LT';
case 6:
return '[v sobotu v] LT';
}
},
lastDay: '[včera v] LT',
lastWeek: function () {
switch (this.day()) {
case 0:
return '[minulou neděli v] LT';
case 1:
case 2:
return '[minulé] dddd [v] LT';
case 3:
return '[minulou středu v] LT';
case 4:
case 5:
return '[minulý] dddd [v] LT';
case 6:
return '[minulou sobotu v] LT';
}
},
sameElse: 'L'
},
relativeTime : {
future : 'za %s',
past : 'před %s',
s : translate,
m : translate,
mm : translate,
h : translate,
hh : translate,
d : translate,
dd : translate,
M : translate,
MM : translate,
y : translate,
yy : translate
},
ordinalParse : /\d{1,2}\./,
ordinal : '%d.',
week : {
dow : 1, // Monday is the first day of the week.
doy : 4 // The week that contains Jan 4th is the first week of the year.
}
});
return cs;
})); | ithoutSuf |
buttons-test.rs | /*
* Copyright (c) 2017-2018 Boucher, Antoni <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
extern crate gdk;
extern crate gtk;
#[macro_use]
extern crate relm;
#[macro_use]
extern crate relm_derive;
#[macro_use]
extern crate gtk_test;
#[macro_use]
extern crate relm_test;
use gdk::EventType::DoubleButtonPress;
use gtk::{
ButtonExt,
Inhibit,
LabelExt,
Menu,
MenuItem,
MenuShellExt,
OrientableExt,
ToolButtonExt,
GtkMenuItemExt,
WidgetExt,
};
use gtk::Orientation::Vertical;
use relm::{Relm, Widget, timeout};
use relm_derive::widget;
use self::Msg::*;
use self::LabelMsg::*;
pub struct LabelModel {
text: String,
}
#[derive(Clone, Msg)]
pub enum LabelMsg {
Click,
DblClick,
Text(String),
}
#[widget]
impl Widget for ClickableLabel {
fn model() -> LabelModel {
LabelModel {
text: String::new(),
}
}
fn update(&mut self, event: LabelMsg) {
match event {
// To be listened to by the user.
Click | DblClick => (),
Text(text) => self.model.text = text,
}
}
view! {
gtk::EventBox {
button_press_event(_, event) => ({
if event.get_event_type() == DoubleButtonPress {
DblClick
}
else {
Click
}
}, Inhibit(false)),
#[name="label"]
gtk::Label {
name: "label",
text: &self.model.text,
},
},
}
}
#[derive(Clone)]
pub struct Model {
counter: i32,
inc_text: String,
relm: Relm<Win>,
text: String,
}
#[derive(Clone, Msg)]
pub enum Msg {
Decrement,
DoubleClick,
FiveInc,
GetModel,
Increment,
RecvModel(Model),
Quit,
TwoInc(i32, i32),
UpdateText,
UpdateTextNow,
}
#[widget]
impl Widget for Win {
fn init_view(&mut self) {
let menu = Menu::new();
let inc = MenuItem::new_with_label("Increment");
connect!(self.model.relm, inc, connect_activate(_), Increment);
menu.append(&inc);
self.menu_action.set_submenu(Some(&menu));
self.menu_bar.show_all();
}
fn model(relm: &Relm<Self>, _: ()) -> Model |
fn update(&mut self, event: Msg) {
match event {
Decrement => self.model.counter -= 1,
DoubleClick => self.model.inc_text = "Double click".to_string(),
// To be listened to by the user.
FiveInc => (),
GetModel => self.model.relm.stream().emit(RecvModel(self.model.clone())),
Increment => {
self.model.counter += 1;
if self.model.counter == 2 {
self.model.relm.stream().emit(TwoInc(1, 2));
}
if self.model.counter == 5 {
self.model.relm.stream().emit(FiveInc);
}
},
// To be listened to by the user.
RecvModel(_) => (),
Quit => gtk::main_quit(),
// To be listened to by the user.
TwoInc(_, _) => (),
UpdateText => timeout(self.model.relm.stream(), 100, || UpdateTextNow),
UpdateTextNow => self.model.text = "Updated text".to_string(),
}
}
view! {
gtk::Window {
gtk::Box {
#[name="menu_bar"]
gtk::MenuBar {
#[name="menu_action"]
gtk::MenuItem {
label: "Action",
},
},
gtk::Toolbar {
#[name="inc_tool_button"]
gtk::ToolButton {
label: "Increment",
clicked => Increment,
},
},
orientation: Vertical,
#[name="inc_button"]
gtk::Button {
clicked => Increment,
label: "+",
},
#[name="label"]
gtk::Label {
text: &self.model.counter.to_string(),
},
#[name="dec_button"]
gtk::Button {
clicked => Decrement,
label: "-",
},
#[name="text"]
gtk::Label {
text: &self.model.text,
},
#[name="update_button"]
gtk::Button {
clicked => UpdateText,
label: "Update text",
},
#[name="inc_label"]
ClickableLabel {
Click => Increment,
DblClick => DoubleClick,
Text: self.model.inc_text.clone(),
},
},
delete_event(_, _) => (Quit, Inhibit(false)),
}
}
}
#[cfg(test)]
mod tests {
use gtk::{
Cast,
ContainerExt,
LabelExt,
Menu,
MenuItem,
GtkMenuItemExt,
};
use relm;
use gtk_test::{
click,
double_click,
find_widget_by_name,
wait,
};
use relm_test::{
Observer,
};
use Msg::{self, FiveInc, GetModel, RecvModel, TwoInc};
use LabelMsg::Text;
use Win;
#[test]
fn label_change() {
let (component, widgets) = relm::init_test::<Win>(()).expect("init relm test");
let inc_button = &widgets.inc_button;
let dec_button = &widgets.dec_button;
let update_button = &widgets.update_button;
let inc_tool_button = &widgets.inc_tool_button;
let inc_label = &widgets.inc_label;
// Observe for messages.
let observer = Observer::new(component.stream(), |msg|
if let FiveInc = msg {
true
}
else {
false
}
);
let label_observer = relm_observer_new!(inc_label, Text(_));
// Shortcut for the previous call to Observer::new().
let two_observer = relm_observer_new!(component, TwoInc(_, _));
let model_observer = Observer::new(component.stream(), |msg|
if let RecvModel(_) = msg {
true
}
else {
false
}
);
assert_text!(widgets.label, 0);
click(inc_button);
assert_text!(widgets.label, 1);
click(inc_button);
assert_text!(widgets.label, 2);
// Shortcut for the call to wait() below.
relm_observer_wait!(let TwoInc(one, two) = two_observer);
assert_eq!(one, 1);
assert_eq!(two, 2);
click(dec_button);
assert_text!(widgets.label, 1);
click(inc_button);
assert_text!(widgets.label, 2);
relm_observer_wait!(let Msg::TwoInc(one, two) = two_observer);
assert_eq!(one, 1);
assert_eq!(two, 2);
click(dec_button);
assert_text!(widgets.label, 1);
click(dec_button);
assert_text!(widgets.label, 0);
click(dec_button);
assert_text!(widgets.label, -1);
for _ in 0..6 {
click(inc_button);
}
// Wait to receive the message on this observer.
observer.wait();
// Ask for the model. This will emit RecvModel.
component.stream().emit(GetModel);
let msg = model_observer.wait();
if let RecvModel(model) = msg {
assert_eq!(model.counter, 5);
}
else {
panic!("Wrong message type.");
}
component.stream().emit(GetModel);
relm_observer_wait!(let RecvModel(model) = model_observer);
assert_eq!(model.counter, 5);
let action_menu: MenuItem = widgets.menu_bar.get_children()[0].clone().downcast().expect("menu item 2");
click(&action_menu);
let menu: Menu = action_menu.get_submenu().expect("menu 2").downcast().expect("menu 3");
let inc_menu: MenuItem = menu.get_children()[0].clone().downcast().expect("menu item");
click(&inc_menu);
assert_text!(widgets.label, 6);
click(inc_tool_button);
assert_text!(widgets.label, 7);
let inc_label = inc_label.widget();
click(inc_label);
assert_text!(widgets.label, 8);
assert_text!(widgets.text, "");
click(update_button);
assert_text!(widgets.text, "");
wait(200);
assert_text!(widgets.text, "Updated text");
let inc_label = find_widget_by_name(inc_label, "label").expect("find label");
double_click(&inc_label);
relm_observer_wait!(let Text(text) = label_observer);
assert_eq!(text, "Double click");
assert_text!(widgets.label, 10);
}
/*
* Starting gtk multiple in a different thread is forbidden.
#[test]
fn clickable_label() {
let (component, widgets) = relm::init_test::<ClickableLabel>(()).expect("init relm test");
let label = &widgets.label;
assert_text!(label, "");
component.stream().emit(Text("Test".to_string()));
wait(200);
assert_text!(label, "Test");
}*/
}
| {
Model {
counter: 0,
inc_text: "Increment".to_string(),
relm: relm.clone(),
text: String::new(),
}
} |
entry_wildcard.go | package classpath
import "os"
import "path/filepath"
import (
"strings"
)
func newWildcardEntry(path string) CompositeEntry | {
baseDir := path[:len(path)-1] //remove
compositeEntry := []Entry{}
walkFn := func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() && path != baseDir {
return filepath.SkipDir
}
if strings.HasSuffix(path, ".jar") || strings.HasSuffix(path, ".JAR") {
jarEntry := newZipEntry(path)
compositeEntry = append(compositeEntry, jarEntry)
}
return nil
}
filepath.Walk(baseDir, walkFn)
return compositeEntry
} |
|
text_hittester.rs | use sdl2::rect::Point as SDLPoint;
use sdl2::rect::Rect as SDLRect;
use crate::props::HitTestResult;
pub struct TextHitTester {
entries: Vec<(SDLRect, HitTestResult)>,
}
impl TextHitTester {
pub fn init() -> TextHitTester {
TextHitTester { entries: vec![] }
}
pub fn add(&mut self, rect: SDLRect, result: HitTestResult) {
self.entries.push((rect, result));
}
pub fn clear(&mut self) {
self.entries.clear();
}
pub fn | (&self, x: i32, y: i32) -> Option<HitTestResult> {
let point = SDLPoint::new(x, y);
for e in &self.entries {
if e.0.contains_point(point) {
return Some(e.1.clone());
}
}
None
}
}
| hit_test |
tests_timeout.py | import socket
import time
from xmlrpclib_to import ServerProxy
import httpretty
import pytest
XML_RESPONSE = """<?xml version="1.0"?>
<methodResponse>
<params>
<param>
<value><string>Test</string></value>
</param>
</params>
</methodResponse>"""
def timeout(request, url, headers):
|
@httpretty.activate
def test_timeout():
httpretty.register_uri(
httpretty.POST,
'http://example.com/RPC2',
content_type='text/xml',
body=timeout
)
proxy = ServerProxy('http://example.com', timeout=0.5)
with pytest.raises(socket.timeout):
proxy.test()
@httpretty.activate
def test_timeout_https():
httpretty.register_uri(
httpretty.POST,
'https://example.com/RPC2',
content_type='text/xml',
body=timeout
)
proxy = ServerProxy('https://example.com', timeout=0.5)
with pytest.raises(socket.timeout):
proxy.test()
if __name__ == "__main__":
test_timeout()
test_timeout_https()
| time.sleep(1)
return 200, headers, XML_RESPONSE |
navigation.js | import { Navigation } from 'react-native-navigation';
export function | () {
Navigation.startSingleScreenApp({
screen: {
screen: 'LoadingScreen', // unique ID registered with Navigation.registerScreen
title: 'clARity', // title of the screen as appears in the nav bar (optional)
navigatorStyle: {
'navBarHidden': true,
}, // override the navigator style for the screen, see "Styling the navigator" below (optional)
navigatorButtons: {} // override the nav buttons for the screen, see "Adding buttons to the navigator" below (optional)
},
});
}; | registerNavigation |
main.go | package main
import (
// Make sure dep tools picks up these dependencies
_ "github.com/go-openapi/loads"
_ "k8s.io/apimachinery/pkg/apis/meta/v1"
_ "github.com/infobloxopen/konk/test/apiserver/pkg/auth/remoteheader"
_ "k8s.io/client-go/plugin/pkg/client/auth" // Enable cloud provider auth
"sigs.k8s.io/apiserver-builder-alpha/pkg/cmd/server"
"github.com/infobloxopen/konk/test/apiserver/pkg/apis"
"github.com/infobloxopen/konk/test/apiserver/pkg/openapi"
// _ "github.com/infobloxopen/konk/test/apiserver/plugin/admission/install"
)
func main() {
version := "v0"
err := server.StartApiServerWithOptions(&server.StartOptions{
EtcdPath: "/registry/infoblox.com",
Apis: apis.GetAllApiBuilders(),
Openapidefs: openapi.GetOpenAPIDefinitions,
Title: "Api",
Version: version,
// TweakConfigFuncs []func(apiServer *apiserver.Config) error | panic(err)
}
} | // FlagConfigFuncs []func(*cobra.Command) error
})
if err != nil { |
celery_app.py | import os
from celery import Celery
| app = Celery("{{cookiecutter.project_slug}}")
# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
# should have a `CELERY_` prefix.
app.config_from_object("django.conf:settings", namespace="CELERY")
# Load task modules from all registered Django app configs.
app.autodiscover_tasks() | # set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{cookiecutter.project_slug}}.settings")
|
runner.py | from __future__ import print_function
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
| help = 'Run arbitrary script within Django environment'
option_list = BaseCommand.option_list + (
make_option('--file', dest='file', help='Path to Python script'),
)
def handle(self, **options):
file = options['file']
if file is None:
raise CommandError('--file option is required')
execfile(options['file']) |
|
health_check_configurer.go | package clusters
import (
"encoding/hex"
envoy_cluster "github.com/envoyproxy/go-control-plane/envoy/config/cluster/v3"
envoy_core "github.com/envoyproxy/go-control-plane/envoy/config/core/v3"
envoy_type "github.com/envoyproxy/go-control-plane/envoy/type/v3"
"github.com/pkg/errors"
"google.golang.org/protobuf/types/known/wrapperspb"
mesh_proto "github.com/kumahq/kuma/api/mesh/v1alpha1"
"github.com/kumahq/kuma/pkg/core"
core_mesh "github.com/kumahq/kuma/pkg/core/resources/apis/mesh"
)
type HealthCheckConfigurer struct {
HealthCheck *core_mesh.HealthCheckResource
Protocol core_mesh.Protocol
}
var _ ClusterConfigurer = &HealthCheckConfigurer{}
func mapUInt32ToInt64Range(value uint32) *envoy_type.Int64Range {
return &envoy_type.Int64Range{
Start: int64(value),
End: int64(value) + 1,
}
}
func mapHttpHeaders(
headers []*mesh_proto.HealthCheck_Conf_Http_HeaderValueOption,
) []*envoy_core.HeaderValueOption {
var envoyHeaders []*envoy_core.HeaderValueOption
for _, header := range headers {
envoyHeaders = append(envoyHeaders, &envoy_core.HeaderValueOption{
Header: &envoy_core.HeaderValue{
Key: header.Header.Key,
Value: header.Header.Value,
},
Append: header.Append,
})
}
return envoyHeaders
}
func | (
tcpConf *mesh_proto.HealthCheck_Conf_Tcp,
) *envoy_core.HealthCheck_TcpHealthCheck_ {
tcpHealthCheck := envoy_core.HealthCheck_TcpHealthCheck{}
if tcpConf.Send != nil {
tcpHealthCheck.Send = &envoy_core.HealthCheck_Payload{
Payload: &envoy_core.HealthCheck_Payload_Text{
Text: hex.EncodeToString(tcpConf.Send.Value),
},
}
}
if tcpConf.Receive != nil {
var receive []*envoy_core.HealthCheck_Payload
for _, r := range tcpConf.Receive {
receive = append(receive, &envoy_core.HealthCheck_Payload{
Payload: &envoy_core.HealthCheck_Payload_Text{
Text: hex.EncodeToString(r.Value),
},
})
}
tcpHealthCheck.Receive = receive
}
return &envoy_core.HealthCheck_TcpHealthCheck_{
TcpHealthCheck: &tcpHealthCheck,
}
}
func httpHealthCheck(
protocol core_mesh.Protocol,
httpConf *mesh_proto.HealthCheck_Conf_Http,
) *envoy_core.HealthCheck_HttpHealthCheck_ {
var expectedStatuses []*envoy_type.Int64Range
for _, status := range httpConf.ExpectedStatuses {
expectedStatuses = append(
expectedStatuses,
mapUInt32ToInt64Range(status.Value),
)
}
codecClientType := envoy_type.CodecClientType_HTTP1
if protocol == core_mesh.ProtocolHTTP2 {
codecClientType = envoy_type.CodecClientType_HTTP2
}
httpHealthCheck := envoy_core.HealthCheck_HttpHealthCheck{
Path: httpConf.Path,
RequestHeadersToAdd: mapHttpHeaders(httpConf.RequestHeadersToAdd),
ExpectedStatuses: expectedStatuses,
CodecClientType: codecClientType,
}
return &envoy_core.HealthCheck_HttpHealthCheck_{
HttpHealthCheck: &httpHealthCheck,
}
}
func healthPanicThreshold(cluster *envoy_cluster.Cluster, value *wrapperspb.FloatValue) {
if value == nil {
return
}
if cluster.CommonLbConfig == nil {
cluster.CommonLbConfig = &envoy_cluster.Cluster_CommonLbConfig{}
}
cluster.CommonLbConfig.HealthyPanicThreshold = &envoy_type.Percent{Value: float64(value.Value)}
}
func failTrafficOnPanic(cluster *envoy_cluster.Cluster, value *wrapperspb.BoolValue) {
if value == nil {
return
}
if cluster.CommonLbConfig == nil {
cluster.CommonLbConfig = &envoy_cluster.Cluster_CommonLbConfig{}
}
if cluster.CommonLbConfig.GetLocalityWeightedLbConfig() != nil {
// used load balancing type doesn't support 'fail_traffic_on_panic', right now we don't use
// 'locality_weighted_lb_config' in Kuma, locality aware load balancing is implemented based on priority levels
core.Log.WithName("health-check-configurer").Error(
errors.New("unable to set 'fail_traffic_on_panic' for 'locality_weighted_lb_config' load balancer"),
"unable to configure 'fail_traffic_on_panic', parameter is ignored")
return
}
if cluster.CommonLbConfig.LocalityConfigSpecifier == nil {
cluster.CommonLbConfig.LocalityConfigSpecifier = &envoy_cluster.Cluster_CommonLbConfig_ZoneAwareLbConfig_{
ZoneAwareLbConfig: &envoy_cluster.Cluster_CommonLbConfig_ZoneAwareLbConfig{},
}
}
if zoneAwareLbConfig := cluster.CommonLbConfig.GetZoneAwareLbConfig(); zoneAwareLbConfig != nil {
zoneAwareLbConfig.FailTrafficOnPanic = value.GetValue()
}
}
func buildHealthCheck(conf *mesh_proto.HealthCheck_Conf) *envoy_core.HealthCheck {
return &envoy_core.HealthCheck{
HealthChecker: &envoy_core.HealthCheck_TcpHealthCheck_{
TcpHealthCheck: &envoy_core.HealthCheck_TcpHealthCheck{},
},
Interval: conf.Interval,
Timeout: conf.Timeout,
UnhealthyThreshold: &wrapperspb.UInt32Value{Value: conf.UnhealthyThreshold},
HealthyThreshold: &wrapperspb.UInt32Value{Value: conf.HealthyThreshold},
InitialJitter: conf.InitialJitter,
IntervalJitter: conf.IntervalJitter,
IntervalJitterPercent: conf.IntervalJitterPercent,
EventLogPath: conf.EventLogPath,
AlwaysLogHealthCheckFailures: conf.AlwaysLogHealthCheckFailures.GetValue(),
NoTrafficInterval: conf.NoTrafficInterval,
ReuseConnection: conf.ReuseConnection,
}
}
func addHealthChecker(healthCheck *envoy_core.HealthCheck, healthChecker interface{}) *envoy_core.HealthCheck {
if httpHc, ok := healthChecker.(*envoy_core.HealthCheck_HttpHealthCheck_); ok {
healthCheck.HealthChecker = httpHc
} else if tcpHc, ok := healthChecker.(*envoy_core.HealthCheck_TcpHealthCheck_); ok {
healthCheck.HealthChecker = tcpHc
}
return healthCheck
}
func (e *HealthCheckConfigurer) Configure(cluster *envoy_cluster.Cluster) error {
if e.HealthCheck == nil || e.HealthCheck.Spec.Conf == nil {
return nil
}
activeChecks := e.HealthCheck.Spec.Conf
healthPanicThreshold(cluster, activeChecks.GetHealthyPanicThreshold())
failTrafficOnPanic(cluster, activeChecks.GetFailTrafficOnPanic())
tcp := activeChecks.GetTcp()
http := activeChecks.GetHttp()
if tcp == nil && http == nil {
cluster.HealthChecks = append(cluster.HealthChecks, buildHealthCheck(activeChecks))
return nil
}
if tcp != nil {
defaultHealthCheck := buildHealthCheck(activeChecks)
healthChecker := tcpHealthCheck(tcp)
healthCheck := addHealthChecker(defaultHealthCheck, healthChecker)
cluster.HealthChecks = append(cluster.HealthChecks, healthCheck)
}
if http != nil {
defaultHealthCheck := buildHealthCheck(activeChecks)
healthChecker := httpHealthCheck(e.Protocol, http)
healthCheck := addHealthChecker(defaultHealthCheck, healthChecker)
cluster.HealthChecks = append(cluster.HealthChecks, healthCheck)
}
return nil
}
| tcpHealthCheck |
cli.rs | use std::env;
pub fn run() | {
let args: Vec<String> = env::args().collect();
let command = args[1].clone();
let name = "Brad";
let status = "100%";
println!("Command: {:?}", command);
if command == "hello" {
println!("Hi {}, how are you?", name);
} else if command == "status" {
println!("Status is {}", status);
} else {
println!("That is not a valid command");
}
} |
|
api_op_ListProjects.go | // Code generated by smithy-go-codegen DO NOT EDIT.
package iotsitewise
import (
"context"
"fmt"
awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
"github.com/aws/aws-sdk-go-v2/aws/signer/v4"
"github.com/aws/aws-sdk-go-v2/service/iotsitewise/types"
"github.com/aws/smithy-go/middleware"
smithyhttp "github.com/aws/smithy-go/transport/http"
)
// Retrieves a paginated list of projects for an AWS IoT SiteWise Monitor portal.
func (c *Client) ListProjects(ctx context.Context, params *ListProjectsInput, optFns ...func(*Options)) (*ListProjectsOutput, error) {
if params == nil {
params = &ListProjectsInput{}
}
result, metadata, err := c.invokeOperation(ctx, "ListProjects", params, optFns, c.addOperationListProjectsMiddlewares)
if err != nil {
return nil, err
}
out := result.(*ListProjectsOutput)
out.ResultMetadata = metadata
return out, nil
}
type ListProjectsInput struct {
// The ID of the portal.
//
// This member is required.
PortalId *string
// The maximum number of results to be returned per paginated request. Default: 50
MaxResults *int32
// The token to be used for the next set of paginated results.
NextToken *string
}
type ListProjectsOutput struct {
// A list that summarizes each project in the portal.
//
// This member is required.
ProjectSummaries []types.ProjectSummary
// The token for the next set of results, or null if there are no additional
// results.
NextToken *string
// Metadata pertaining to the operation's result.
ResultMetadata middleware.Metadata
}
func (c *Client) addOperationListProjectsMiddlewares(stack *middleware.Stack, options Options) (err error) {
err = stack.Serialize.Add(&awsRestjson1_serializeOpListProjects{}, middleware.After)
if err != nil {
return err
}
err = stack.Deserialize.Add(&awsRestjson1_deserializeOpListProjects{}, middleware.After)
if err != nil {
return err
}
if err = addSetLoggerMiddleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddClientRequestIDMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddComputeContentLengthMiddleware(stack); err != nil {
return err
}
if err = addResolveEndpointMiddleware(stack, options); err != nil {
return err
}
if err = v4.AddComputePayloadSHA256Middleware(stack); err != nil {
return err
}
if err = addRetryMiddlewares(stack, options); err != nil {
return err
}
if err = addHTTPSignerV4Middleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddRawResponseToMetadata(stack); err != nil {
return err
}
if err = awsmiddleware.AddRecordResponseTiming(stack); err != nil {
return err
}
if err = addClientUserAgent(stack); err != nil {
return err
}
if err = smithyhttp.AddErrorCloseResponseBodyMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddCloseResponseBodyMiddleware(stack); err != nil { | }
if err = addOpListProjectsValidationMiddleware(stack); err != nil {
return err
}
if err = stack.Initialize.Add(newServiceMetadataMiddleware_opListProjects(options.Region), middleware.Before); err != nil {
return err
}
if err = addRequestIDRetrieverMiddleware(stack); err != nil {
return err
}
if err = addResponseErrorMiddleware(stack); err != nil {
return err
}
if err = addRequestResponseLogging(stack, options); err != nil {
return err
}
return nil
}
type endpointPrefix_opListProjectsMiddleware struct {
}
func (*endpointPrefix_opListProjectsMiddleware) ID() string {
return "EndpointHostPrefix"
}
func (m *endpointPrefix_opListProjectsMiddleware) HandleSerialize(ctx context.Context, in middleware.SerializeInput, next middleware.SerializeHandler) (
out middleware.SerializeOutput, metadata middleware.Metadata, err error,
) {
if smithyhttp.GetHostnameImmutable(ctx) || smithyhttp.IsEndpointHostPrefixDisabled(ctx) {
return next.HandleSerialize(ctx, in)
}
req, ok := in.Request.(*smithyhttp.Request)
if !ok {
return out, metadata, fmt.Errorf("unknown transport type %T", in.Request)
}
req.URL.Host = "monitor." + req.URL.Host
return next.HandleSerialize(ctx, in)
}
func addEndpointPrefix_opListProjectsMiddleware(stack *middleware.Stack) error {
return stack.Serialize.Insert(&endpointPrefix_opListProjectsMiddleware{}, `OperationSerializer`, middleware.After)
}
// ListProjectsAPIClient is a client that implements the ListProjects operation.
type ListProjectsAPIClient interface {
ListProjects(context.Context, *ListProjectsInput, ...func(*Options)) (*ListProjectsOutput, error)
}
var _ ListProjectsAPIClient = (*Client)(nil)
// ListProjectsPaginatorOptions is the paginator options for ListProjects
type ListProjectsPaginatorOptions struct {
// The maximum number of results to be returned per paginated request. Default: 50
Limit int32
// Set to true if pagination should stop if the service returns a pagination token
// that matches the most recent token provided to the service.
StopOnDuplicateToken bool
}
// ListProjectsPaginator is a paginator for ListProjects
type ListProjectsPaginator struct {
options ListProjectsPaginatorOptions
client ListProjectsAPIClient
params *ListProjectsInput
nextToken *string
firstPage bool
}
// NewListProjectsPaginator returns a new ListProjectsPaginator
func NewListProjectsPaginator(client ListProjectsAPIClient, params *ListProjectsInput, optFns ...func(*ListProjectsPaginatorOptions)) *ListProjectsPaginator {
if params == nil {
params = &ListProjectsInput{}
}
options := ListProjectsPaginatorOptions{}
if params.MaxResults != nil {
options.Limit = *params.MaxResults
}
for _, fn := range optFns {
fn(&options)
}
return &ListProjectsPaginator{
options: options,
client: client,
params: params,
firstPage: true,
}
}
// HasMorePages returns a boolean indicating whether more pages are available
func (p *ListProjectsPaginator) HasMorePages() bool {
return p.firstPage || p.nextToken != nil
}
// NextPage retrieves the next ListProjects page.
func (p *ListProjectsPaginator) NextPage(ctx context.Context, optFns ...func(*Options)) (*ListProjectsOutput, error) {
if !p.HasMorePages() {
return nil, fmt.Errorf("no more pages available")
}
params := *p.params
params.NextToken = p.nextToken
var limit *int32
if p.options.Limit > 0 {
limit = &p.options.Limit
}
params.MaxResults = limit
result, err := p.client.ListProjects(ctx, ¶ms, optFns...)
if err != nil {
return nil, err
}
p.firstPage = false
prevToken := p.nextToken
p.nextToken = result.NextToken
if p.options.StopOnDuplicateToken && prevToken != nil && p.nextToken != nil && *prevToken == *p.nextToken {
p.nextToken = nil
}
return result, nil
}
func newServiceMetadataMiddleware_opListProjects(region string) *awsmiddleware.RegisterServiceMetadata {
return &awsmiddleware.RegisterServiceMetadata{
Region: region,
ServiceID: ServiceID,
SigningName: "iotsitewise",
OperationName: "ListProjects",
}
} | return err
}
if err = addEndpointPrefix_opListProjectsMiddleware(stack); err != nil {
return err |
lib.rs | // =================================================================
//
// * WARNING *
//
// This file is generated!
//
// Changes made to this file will be overwritten. If changes are
// required to the generated code, the service_crategen project
// must be updated to generate the changes.
//
// ================================================================= |
//! <fullname>Amazon Relational Database Service</fullname> <p> </p> <p>Amazon Relational Database Service (Amazon RDS) is a web service that makes it easier to set up, operate, and scale a relational database in the cloud. It provides cost-efficient, resizable capacity for an industry-standard relational database and manages common database administration tasks, freeing up developers to focus on what makes their applications and businesses unique.</p> <p>Amazon RDS gives you access to the capabilities of a MySQL, MariaDB, PostgreSQL, Microsoft SQL Server, Oracle, or Amazon Aurora database server. These capabilities mean that the code, applications, and tools you already use today with your existing databases work with Amazon RDS without modification. Amazon RDS automatically backs up your database and maintains the database software that powers your DB instance. Amazon RDS is flexible: you can scale your database instance's compute resources and storage capacity to meet your application's demand. As with all Amazon Web Services, there are no up-front investments, and you pay only for the resources you use.</p> <p>This interface reference for Amazon RDS contains documentation for a programming or command line interface you can use to manage Amazon RDS. Note that Amazon RDS is asynchronous, which means that some interfaces might require techniques such as polling or callback functions to determine when a command has been applied. In this reference, the parameter descriptions indicate whether a command is applied immediately, on the next instance reboot, or during the maintenance window. 
The reference structure is as follows, and we list following some related topics from the user guide.</p> <p> <b>Amazon RDS API Reference</b> </p> <ul> <li> <p>For the alphabetical list of API actions, see <a href="http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_Operations.html">API Actions</a>.</p> </li> <li> <p>For the alphabetical list of data types, see <a href="http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_Types.html">Data Types</a>.</p> </li> <li> <p>For a list of common query parameters, see <a href="http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/CommonParameters.html">Common Parameters</a>.</p> </li> <li> <p>For descriptions of the error codes, see <a href="http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/CommonErrors.html">Common Errors</a>.</p> </li> </ul> <p> <b>Amazon RDS User Guide</b> </p> <ul> <li> <p>For a summary of the Amazon RDS interfaces, see <a href="http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Welcome.html#Welcome.Interfaces">Available RDS Interfaces</a>.</p> </li> <li> <p>For more information about how to use the Query API, see <a href="http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Using_the_Query_API.html">Using the Query API</a>.</p> </li> </ul>
//!
//! If you're using the service, you're probably looking for [RdsClient](struct.RdsClient.html) and [Rds](trait.Rds.html).
extern crate hyper;
extern crate rusoto_core;
extern crate xml;
mod generated;
mod custom;
pub use generated::*;
pub use custom::*; | |
utils.py | """ANTS Apply Transforms interface
Change directory to provide relative paths for doctests
>>> import os
>>> filepath = os.path.dirname( os.path.realpath( __file__ ) )
>>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data'))
>>> os.chdir(datadir)
"""
import os
from .base import ANTSCommand, ANTSCommandInputSpec
from ..base import (TraitedSpec, File, traits,
isdefined)
from ...utils.filemanip import split_filename
from nipype.interfaces.base import InputMultiPath
class AverageAffineTransformInputSpec(ANTSCommandInputSpec):
dimension = traits.Enum(3, 2, argstr='%d', usedefault=False, mandatory=True, position=0, desc='image dimension (2 or 3)')
output_affine_transform = File(argstr='%s', mandatory=True, position=1, desc='Outputfname.txt: the name of the resulting transform.')
transforms = InputMultiPath(File(exists=True), argstr='%s', mandatory=True,
position=3, desc=('transforms to average'))
class AverageAffineTransformOutputSpec(TraitedSpec):
affine_transform = File(exists=True, desc='average transform file')
class AverageAffineTransform(ANTSCommand):
"""
Examples
--------
>>> from nipype.interfaces.ants import AverageAffineTransform
>>> avg = AverageAffineTransform()
>>> avg.inputs.dimension = 3
>>> avg.inputs.transforms = ['trans.mat', 'func_to_struct.mat']
>>> avg.inputs.output_affine_transform = 'MYtemplatewarp.mat'
>>> avg.cmdline
'AverageAffineTransform 3 MYtemplatewarp.mat trans.mat func_to_struct.mat'
"""
_cmd = 'AverageAffineTransform'
input_spec = AverageAffineTransformInputSpec
output_spec = AverageAffineTransformOutputSpec
def _format_arg(self, opt, spec, val):
return super(AverageAffineTransform, self)._format_arg(opt, spec, val)
def _list_outputs(self):
outputs = self._outputs().get()
outputs['affine_transform'] = os.path.abspath(
self.inputs.output_affine_transform)
return outputs
class AverageImagesInputSpec(ANTSCommandInputSpec):
dimension = traits.Enum(3, 2, argstr='%d', mandatory=True,
position=0, desc='image dimension (2 or 3)')
output_average_image = File("average.nii", argstr='%s', position=1, desc='the name of the resulting image.', usedefault=True, hash_files=False)
normalize = traits.Bool(argstr="%d", mandatory=True, position=2, desc='Normalize: if true, the 2nd image' +
'is divided by its mean. This will select the largest image to average into.')
images = InputMultiPath(File(exists=True), argstr='%s', mandatory=True, position=3, desc=('image to apply transformation to (generally a coregistered functional)'))
class AverageImagesOutputSpec(TraitedSpec):
output_average_image = File(exists=True, desc='average image file')
class AverageImages(ANTSCommand):
"""
Examples
--------
>>> from nipype.interfaces.ants import AverageImages
>>> avg = AverageImages()
>>> avg.inputs.dimension = 3
>>> avg.inputs.output_average_image = "average.nii.gz"
>>> avg.inputs.normalize = True
>>> avg.inputs.images = ['rc1s1.nii', 'rc1s1.nii']
>>> avg.cmdline
'AverageImages 3 average.nii.gz 1 rc1s1.nii rc1s1.nii'
"""
_cmd = 'AverageImages'
input_spec = AverageImagesInputSpec
output_spec = AverageImagesOutputSpec
def _format_arg(self, opt, spec, val):
return super(AverageImages, self)._format_arg(opt, spec, val)
def _list_outputs(self):
outputs = self._outputs().get()
outputs['output_average_image'] = os.path.realpath(
self.inputs.output_average_image)
return outputs
class MultiplyImagesInputSpec(ANTSCommandInputSpec):
dimension = traits.Enum(3, 2, argstr='%d', usedefault=False, mandatory=True, position=0, desc='image dimension (2 or 3)')
first_input = File(
argstr='%s', exists=True, mandatory=True, position=1, desc='image 1')
second_input = traits.Either(File(exists=True), traits.Float, argstr='%s', mandatory=True, position=2, desc='image 2 or multiplication weight')
output_product_image = File(argstr='%s', mandatory=True, position=3, desc='Outputfname.nii.gz: the name of the resulting image.')
class MultiplyImagesOutputSpec(TraitedSpec):
|
class MultiplyImages(ANTSCommand):
"""
Examples
--------
>>> from nipype.interfaces.ants import MultiplyImages
>>> test = MultiplyImages()
>>> test.inputs.dimension = 3
>>> test.inputs.first_input = 'moving2.nii'
>>> test.inputs.second_input = 0.25
>>> test.inputs.output_product_image = "out.nii"
>>> test.cmdline
'MultiplyImages 3 moving2.nii 0.25 out.nii'
"""
_cmd = 'MultiplyImages'
input_spec = MultiplyImagesInputSpec
output_spec = MultiplyImagesOutputSpec
def _format_arg(self, opt, spec, val):
return super(MultiplyImages, self)._format_arg(opt, spec, val)
def _list_outputs(self):
outputs = self._outputs().get()
outputs['output_product_image'] = os.path.abspath(
self.inputs.output_product_image)
return outputs
class JacobianDeterminantInputSpec(ANTSCommandInputSpec):
dimension = traits.Enum(3, 2, argstr='%d', usedefault=False, mandatory=True,
position=0, desc='image dimension (2 or 3)')
warp_file = File(argstr='%s', exists=True, mandatory=True,
position=1, desc='input warp file')
output_prefix = File(argstr='%s', genfile=True, hash_files=False,
position=2,
desc=('prefix of the output image filename: '
'PREFIX(log)jacobian.nii.gz'))
use_log = traits.Enum(0, 1, argstr='%d', position=3,
desc='log transform the jacobian determinant')
template_mask = File(argstr='%s', exists=True, position=4,
desc='template mask to adjust for head size')
norm_by_total = traits.Enum(0, 1, argstr='%d', position=5,
desc=('normalize jacobian by total in mask to '
'adjust for head size'))
projection_vector = traits.List(traits.Float(), argstr='%s', sep='x',
position=6,
desc='vector to project warp against')
class JacobianDeterminantOutputSpec(TraitedSpec):
jacobian_image = File(exists=True, desc='(log transformed) jacobian image')
class JacobianDeterminant(ANTSCommand):
"""
Examples
--------
>>> from nipype.interfaces.ants import JacobianDeterminant
>>> jacobian = JacobianDeterminant()
>>> jacobian.inputs.dimension = 3
>>> jacobian.inputs.warp_file = 'ants_Warp.nii.gz'
>>> jacobian.inputs.output_prefix = 'Sub001_'
>>> jacobian.inputs.use_log = 1
>>> jacobian.cmdline
'ANTSJacobian 3 ants_Warp.nii.gz Sub001_ 1'
"""
_cmd = 'ANTSJacobian'
input_spec = JacobianDeterminantInputSpec
output_spec = JacobianDeterminantOutputSpec
def _gen_filename(self, name):
if name == 'output_prefix':
output = self.inputs.output_prefix
if not isdefined(output):
_, name, ext = split_filename(self.inputs.warp_file)
output = name + '_'
return output
return None
def _list_outputs(self):
outputs = self._outputs().get()
if self.inputs.use_log == 1:
outputs['jacobian_image'] = os.path.abspath(
self._gen_filename('output_prefix') + 'logjacobian.nii.gz')
else:
outputs['jacobian_image'] = os.path.abspath(
self._gen_filename('output_prefix') + 'jacobian.nii.gz')
return outputs
| output_product_image = File(exists=True, desc='average image file') |
Fashion_Test.py | """
Keras RFCN
Copyright (c) 2018
Licensed under the MIT License (see LICENSE for details)
Written by [email protected]
"""
'''
This is a demo to Eval a RFCN model with DeepFashion Dataset
http://mmlab.ie.cuhk.edu.hk/projects/DeepFashion.html
'''
from KerasRFCN.Model.Model import RFCN_Model
from KerasRFCN.Config import Config
import KerasRFCN.Utils
import os
from keras.preprocessing import image
import pickle
import numpy as np
import argparse
import matplotlib.pyplot as plt
import matplotlib.patches as patches
class RFCNNConfig(Config):
"""Configuration for training on the toy shapes dataset.
Derives from the base Config class and overrides values specific
to the toy shapes dataset.
"""
# Give the configuration a recognizable name
NAME = "Fashion"
# Backbone model
# choose one from ['resnet50', 'resnet101', 'resnet50_dilated', 'resnet101_dilated']
BACKBONE = "resnet101"
# Train on 1 GPU and 8 images per GPU. We can put multiple images on each
# GPU because the images are small. Batch size is 8 (GPUs * images/GPU).
GPU_COUNT = 1
IMAGES_PER_GPU = 1
# Number of classes (including background)
C = 1 + 46 # background + 2 tags
NUM_CLASSES = C
# Use small images for faster training. Set the limits of the small side
# the large side, and that determines the image shape.
IMAGE_MIN_DIM = 640
IMAGE_MAX_DIM = 768
# Use smaller anchors because our image and objects are small
RPN_ANCHOR_SCALES = (32, 64, 128, 256, 512) # anchor side in pixels
# Use same strides on stage 4-6 if use dilated resnet of DetNet
# Like BACKBONE_STRIDES = [4, 8, 16, 16, 16]
BACKBONE_STRIDES = [4, 8, 16, 32, 64]
# Reduce training ROIs per image because the images are small and have
# few objects. Aim to allow ROI sampling to pick 33% positive ROIs.
TRAIN_ROIS_PER_IMAGE = 200
# Use a small epoch since the data is simple
STEPS_PER_EPOCH = 100
# use small validation steps since the epoch is small
VALIDATION_STEPS = 5
RPN_NMS_THRESHOLD = 0.7
DETECTION_MIN_CONFIDENCE = 0.4
POOL_SIZE = 7
def | (model, loadpath, savepath):
assert not loadpath == savepath, "loadpath should'n same with savepath"
model_path = model.find_last()[1]
# Load trained weights (fill in path to trained weights here)
model.load_weights(model_path, by_name=True)
print("Loading weights from ", model_path)
if os.path.isdir(loadpath):
for idx, imgname in enumerate(os.listdir(loadpath)):
if not imgname.lower().endswith(('.bmp', '.jpeg', '.jpg', '.png', '.tif', '.tiff')):
continue
print(imgname)
imageoriChannel = np.array(plt.imread( os.path.join(loadpath, imgname) )) / 255.0
img = image.img_to_array( image.load_img(os.path.join(loadpath, imgname)) )
TestSinglePic(img, imageoriChannel, model, savepath=savepath, imgname=imgname)
elif os.path.isfile(loadpath):
if not loadpath.lower().endswith(('.bmp', '.jpeg', '.jpg', '.png', '.tif', '.tiff')):
print("not image file!")
return
print(loadpath)
imageoriChannel = np.array(plt.imread( loadpath )) / 255.0
img = image.img_to_array( image.load_img(loadpath) )
(filename,extension) = os.path.splitext(loadpath)
TestSinglePic(img, imageoriChannel, model, savepath=savepath, imgname=filename)
def TestSinglePic(image, image_ori, model, savepath, imgname):
r = model.detect([image], verbose=1)[0]
print(r)
def get_ax(rows=1, cols=1, size=8):
_, ax = plt.subplots(rows, cols, figsize=(size*cols, size*rows))
return ax
ax = get_ax(1)
assert not savepath == "", "empty save path"
assert not imgname == "", "empty image file name"
for box in r['rois']:
y1, x1, y2, x2 = box
p = patches.Rectangle((x1, y1), x2 - x1, y2 - y1, linewidth=2,
alpha=0.7, linestyle="dashed",
edgecolor="red", facecolor='none')
ax.add_patch(p)
ax.imshow(image_ori)
plt.savefig(os.path.join(savepath, imgname),bbox_inches='tight')
plt.clf()
if __name__ == '__main__':
ROOT_DIR = os.getcwd()
parser = argparse.ArgumentParser()
parser.add_argument('--loadpath', required=False,
default="images/",
metavar="evaluate images loadpath",
help="evaluate images loadpath")
parser.add_argument('--savepath', required=False,
default="result/",
metavar="evaluate images savepath",
help="evaluate images savepath")
config = RFCNNConfig()
args = parser.parse_args()
model = RFCN_Model(mode="inference", config=config,
model_dir=os.path.join(ROOT_DIR, "logs") )
Test(model, args.loadpath, args.savepath) | Test |
lib.rs | /*!
*xmlparser* is a low-level, pull-based, zero-allocation
[XML 1.0](https://www.w3.org/TR/xml/) parser.
## Example
```rust
for token in xmlparser::Tokenizer::from("<tagname name='value'/>") {
println!("{:?}", token);
}
```
## Why a new library?
This library is basically a low-level XML tokenizer that preserves the positions of the tokens
and is not intended to be used directly.
If you are looking for a higher level solution, check out
[roxmltree](https://github.com/RazrFalcon/roxmltree).
## Benefits
- All tokens contain `StrSpan` structs which represent the position of the substring
in the original document.
- Good error processing. All error types contain the position (line:column) where it occurred.
- No heap allocations.
- No dependencies.
- Tiny. ~1400 LOC and ~30KiB in the release build according to `cargo-bloat`.
- Supports `no_std` builds. To use without the standard library, disable the default features.
## Limitations
- Currently, only ENTITY objects are parsed from the DOCTYPE. All others are ignored.
- No tree structure validation. So an XML like `<root><child></root></child>`
or a string without root element
will be parsed without errors. You should check for this manually.
On the other hand `<a/><a/>` will lead to an error.
- Duplicated attributes is not an error. So XML like `<item a="v1" a="v2"/>`
will be parsed without errors. You should check for this manually.
- UTF-8 only.
## Safety
- The library must not panic. Any panic is considered a critical bug
and should be reported.
- The library forbids unsafe code.
*/
#![no_std]
#![doc(html_root_url = "https://docs.rs/xmlparser/0.13.4")]
#![forbid(unsafe_code)]
#![warn(missing_docs)]
#![allow(ellipsis_inclusive_range_patterns)]
#[cfg(feature = "std")]
#[macro_use]
extern crate std;
macro_rules! matches {
($expression:expr, $($pattern:tt)+) => {
match $expression {
$($pattern)+ => true,
_ => false
}
}
}
mod error;
mod stream;
mod strspan;
mod xmlchar;
pub use crate::error::*;
pub use crate::stream::*;
pub use crate::strspan::*;
pub use crate::xmlchar::*;
/// An XML token.
#[allow(missing_docs)]
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum Token<'a> {
/// Declaration token.
///
/// ```text
/// <?xml version='1.0' encoding='UTF-8' standalone='yes'?>
/// --- - version
/// ----- - encoding?
/// --- - standalone?
/// ------------------------------------------------------- - span
/// ```
Declaration {
version: StrSpan<'a>,
encoding: Option<StrSpan<'a>>,
standalone: Option<bool>,
span: StrSpan<'a>,
},
/// Processing instruction token.
///
/// ```text
/// <?target content?>
/// ------ - target
/// ------- - content?
/// ------------------ - span
/// ```
ProcessingInstruction {
target: StrSpan<'a>,
content: Option<StrSpan<'a>>,
span: StrSpan<'a>,
},
/// Comment token.
///
/// ```text
/// <!-- text -->
/// ------ - text
/// ------------- - span
/// ```
Comment {
text: StrSpan<'a>,
span: StrSpan<'a>,
},
/// DOCTYPE start token.
///
/// ```text
/// <!DOCTYPE greeting SYSTEM "hello.dtd" [
/// -------- - name
/// ------------------ - external_id?
/// --------------------------------------- - span
/// ```
DtdStart {
name: StrSpan<'a>,
external_id: Option<ExternalId<'a>>,
span: StrSpan<'a>,
},
/// Empty DOCTYPE token.
///
/// ```text
/// <!DOCTYPE greeting SYSTEM "hello.dtd">
/// -------- - name
/// ------------------ - external_id?
/// -------------------------------------- - span
/// ```
EmptyDtd {
name: StrSpan<'a>,
external_id: Option<ExternalId<'a>>,
span: StrSpan<'a>,
},
/// ENTITY token.
///
/// Can appear only inside the DTD.
///
/// ```text
/// <!ENTITY ns_extend "http://test.com">
/// --------- - name
/// --------------- - definition
/// ------------------------------------- - span
/// ```
EntityDeclaration {
name: StrSpan<'a>,
definition: EntityDefinition<'a>,
span: StrSpan<'a>,
},
/// DOCTYPE end token.
///
/// ```text
/// <!DOCTYPE svg [
/// ...
/// ]>
/// -- - span
/// ```
DtdEnd {
span: StrSpan<'a>,
},
/// Element start token.
///
/// ```text
/// <ns:elem attr="value"/>
/// -- - prefix
/// ---- - local
/// -------- - span
/// ```
ElementStart {
prefix: StrSpan<'a>,
local: StrSpan<'a>,
span: StrSpan<'a>,
},
/// Attribute token.
///
/// ```text
/// <elem ns:attr="value"/>
/// -- - prefix
/// ---- - local
/// ----- - value
/// --------------- - span
/// ```
Attribute {
prefix: StrSpan<'a>,
local: StrSpan<'a>,
value: StrSpan<'a>,
span: StrSpan<'a>,
},
/// Element end token.
///
/// ```text
/// <ns:elem>text</ns:elem>
/// - ElementEnd::Open
/// - - span
/// ```
///
/// ```text
/// <ns:elem>text</ns:elem>
/// -- ---- - ElementEnd::Close(prefix, local)
/// ---------- - span
/// ```
///
/// ```text
/// <ns:elem/>
/// - ElementEnd::Empty
/// -- - span
/// ```
ElementEnd {
end: ElementEnd<'a>,
span: StrSpan<'a>,
}, | /// Text token.
///
/// Contains text between elements including whitespaces.
/// Basically everything between `>` and `<`.
/// Except `]]>`, which is not allowed and will lead to an error.
///
/// ```text
/// <p> text </p>
/// ------ - text
/// ```
///
/// The token span is equal to the `text`.
Text {
text: StrSpan<'a>,
},
/// CDATA token.
///
/// ```text
/// <p><![CDATA[text]]></p>
/// ---- - text
/// ---------------- - span
/// ```
Cdata {
text: StrSpan<'a>,
span: StrSpan<'a>,
},
}
/// `ElementEnd` token.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum ElementEnd<'a> {
/// Indicates `>`
Open,
/// Indicates `</name>`
Close(StrSpan<'a>, StrSpan<'a>),
/// Indicates `/>`
Empty,
}
/// Representation of the [ExternalID](https://www.w3.org/TR/xml/#NT-ExternalID) value.
#[allow(missing_docs)]
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum ExternalId<'a> {
System(StrSpan<'a>),
Public(StrSpan<'a>, StrSpan<'a>),
}
/// Representation of the [EntityDef](https://www.w3.org/TR/xml/#NT-EntityDef) value.
#[allow(missing_docs)]
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum EntityDefinition<'a> {
EntityValue(StrSpan<'a>),
ExternalId(ExternalId<'a>),
}
type Result<T> = core::result::Result<T, Error>;
type StreamResult<T> = core::result::Result<T, StreamError>;
#[derive(Clone, Copy, PartialEq)]
enum State {
Declaration,
AfterDeclaration,
Dtd,
AfterDtd,
Elements,
Attributes,
AfterElements,
End,
}
/// Tokenizer for the XML structure.
pub struct Tokenizer<'a> {
stream: Stream<'a>,
state: State,
depth: usize,
fragment_parsing: bool,
}
impl<'a> From<&'a str> for Tokenizer<'a> {
#[inline]
fn from(text: &'a str) -> Self {
let mut stream = Stream::from(text);
// Skip UTF-8 BOM.
if stream.starts_with(&[0xEF, 0xBB, 0xBF]) {
stream.advance(3);
}
Tokenizer {
stream,
state: State::Declaration,
depth: 0,
fragment_parsing: false,
}
}
}
macro_rules! map_err_at {
($fun:expr, $stream:expr, $err:ident) => {{
let start = $stream.pos();
$fun.map_err(|e|
Error::$err(e, $stream.gen_text_pos_from(start))
)
}}
}
impl<'a> Tokenizer<'a> {
/// Enables document fragment parsing.
///
/// By default, `xmlparser` will check for DTD, root element, etc.
/// But if we have to parse an XML fragment, it will lead to an error.
/// This method switches the parser to the root element content parsing mode,
/// so it will treat any data as a content of the root element.
pub fn from_fragment(full_text: &'a str, fragment: core::ops::Range<usize>) -> Self {
Tokenizer {
stream: Stream::from_substr(full_text, fragment),
state: State::Elements,
depth: 0,
fragment_parsing: true,
}
}
fn parse_next_impl(&mut self) -> Option<Result<Token<'a>>> {
let s = &mut self.stream;
if s.at_end() {
return None;
}
let start = s.pos();
match self.state {
State::Declaration => {
self.state = State::AfterDeclaration;
if s.starts_with(b"<?xml ") {
Some(Self::parse_declaration(s))
} else {
self.parse_next_impl()
}
}
State::AfterDeclaration => {
if s.starts_with(b"<!DOCTYPE") {
let t = Self::parse_doctype(s);
match t {
Ok(Token::DtdStart { .. }) => self.state = State::Dtd,
Ok(Token::EmptyDtd { .. }) => self.state = State::AfterDtd,
_ => {}
}
Some(t)
} else if s.starts_with(b"<!--") {
Some(Self::parse_comment(s))
} else if s.starts_with(b"<?") {
if s.starts_with(b"<?xml ") {
Some(Err(Error::UnknownToken(s.gen_text_pos())))
} else {
Some(Self::parse_pi(s))
}
} else if s.starts_with_space() {
s.skip_spaces();
self.parse_next_impl()
} else {
self.state = State::AfterDtd;
self.parse_next_impl()
}
}
State::Dtd => {
if s.starts_with(b"<!ENTITY") {
Some(Self::parse_entity_decl(s))
} else if s.starts_with(b"<!--") {
Some(Self::parse_comment(s))
} else if s.starts_with(b"<?") {
if s.starts_with(b"<?xml ") {
Some(Err(Error::UnknownToken(s.gen_text_pos())))
} else {
Some(Self::parse_pi(s))
}
} else if s.starts_with(b"]") {
// DTD ends with ']' S? '>', therefore we have to skip possible spaces.
s.advance(1);
s.skip_spaces();
match s.curr_byte() {
Ok(b'>') => {
self.state = State::AfterDtd;
s.advance(1);
Some(Ok(Token::DtdEnd { span: s.slice_back(start) }))
}
Ok(c) => {
let e = StreamError::InvalidChar(c, b'>', s.gen_text_pos());
Some(Err(Error::InvalidDoctype(e, s.gen_text_pos_from(start))))
}
Err(_) => {
let e = StreamError::UnexpectedEndOfStream;
Some(Err(Error::InvalidDoctype(e, s.gen_text_pos_from(start))))
}
}
} else if s.starts_with_space() {
s.skip_spaces();
self.parse_next_impl()
} else if s.starts_with(b"<!ELEMENT")
|| s.starts_with(b"<!ATTLIST")
|| s.starts_with(b"<!NOTATION")
{
if Self::consume_decl(s).is_err() {
let pos = s.gen_text_pos_from(start);
Some(Err(Error::UnknownToken(pos)))
} else {
self.parse_next_impl()
}
} else {
Some(Err(Error::UnknownToken(s.gen_text_pos())))
}
}
State::AfterDtd => {
if s.starts_with(b"<!--") {
Some(Self::parse_comment(s))
} else if s.starts_with(b"<?") {
if s.starts_with(b"<?xml ") {
Some(Err(Error::UnknownToken(s.gen_text_pos())))
} else {
Some(Self::parse_pi(s))
}
} else if s.starts_with(b"<!") {
Some(Err(Error::UnknownToken(s.gen_text_pos())))
} else if s.starts_with(b"<") {
self.state = State::Attributes;
Some(Self::parse_element_start(s))
} else if s.starts_with_space() {
s.skip_spaces();
self.parse_next_impl()
} else {
Some(Err(Error::UnknownToken(s.gen_text_pos())))
}
}
State::Elements => {
// Use `match` only here, because only this section is performance-critical.
match s.curr_byte() {
Ok(b'<') => {
match s.next_byte() {
Ok(b'!') => {
if s.starts_with(b"<!--") {
Some(Self::parse_comment(s))
} else if s.starts_with(b"<![CDATA[") {
Some(Self::parse_cdata(s))
} else {
Some(Err(Error::UnknownToken(s.gen_text_pos())))
}
}
Ok(b'?') => {
if !s.starts_with(b"<?xml ") {
Some(Self::parse_pi(s))
} else {
Some(Err(Error::UnknownToken(s.gen_text_pos())))
}
}
Ok(b'/') => {
if self.depth > 0 {
self.depth -= 1;
}
if self.depth == 0 && !self.fragment_parsing {
self.state = State::AfterElements;
} else {
self.state = State::Elements;
}
Some(Self::parse_close_element(s))
}
Ok(_) => {
self.state = State::Attributes;
Some(Self::parse_element_start(s))
}
Err(_) => {
return Some(Err(Error::UnknownToken(s.gen_text_pos())));
}
}
}
Ok(_) => {
Some(Self::parse_text(s))
}
Err(_) => {
Some(Err(Error::UnknownToken(s.gen_text_pos())))
}
}
}
State::Attributes => {
let t = Self::parse_attribute(s);
if let Ok(Token::ElementEnd { end, .. }) = t {
if end == ElementEnd::Open {
self.depth += 1;
}
if self.depth == 0 && !self.fragment_parsing {
self.state = State::AfterElements;
} else {
self.state = State::Elements;
}
}
Some(t.map_err(|e| Error::InvalidAttribute(e, s.gen_text_pos_from(start))))
}
State::AfterElements => {
if s.starts_with(b"<!--") {
Some(Self::parse_comment(s))
} else if s.starts_with(b"<?") {
if s.starts_with(b"<?xml ") {
Some(Err(Error::UnknownToken(s.gen_text_pos())))
} else {
Some(Self::parse_pi(s))
}
} else if s.starts_with_space() {
s.skip_spaces();
self.parse_next_impl()
} else {
Some(Err(Error::UnknownToken(s.gen_text_pos())))
}
}
State::End => {
None
}
}
}
fn parse_declaration(s: &mut Stream<'a>) -> Result<Token<'a>> {
map_err_at!(Self::parse_declaration_impl(s), s, InvalidDeclaration)
}
// XMLDecl ::= '<?xml' VersionInfo EncodingDecl? SDDecl? S? '?>'
fn parse_declaration_impl(s: &mut Stream<'a>) -> StreamResult<Token<'a>> {
fn consume_spaces(s: &mut Stream) -> StreamResult<()> {
if s.starts_with_space() {
s.skip_spaces();
} else if !s.starts_with(b"?>") && !s.at_end() {
return Err(StreamError::InvalidSpace(s.curr_byte_unchecked(), s.gen_text_pos()));
}
Ok(())
}
let start = s.pos();
s.advance(6);
let version = Self::parse_version_info(s)?;
consume_spaces(s)?;
let encoding = Self::parse_encoding_decl(s)?;
if encoding.is_some() {
consume_spaces(s)?;
}
let standalone = Self::parse_standalone(s)?;
s.skip_spaces();
s.skip_string(b"?>")?;
let span = s.slice_back(start);
Ok(Token::Declaration { version, encoding, standalone, span })
}
// VersionInfo ::= S 'version' Eq ("'" VersionNum "'" | '"' VersionNum '"')
// VersionNum ::= '1.' [0-9]+
fn parse_version_info(s: &mut Stream<'a>) -> StreamResult<StrSpan<'a>> {
s.skip_spaces();
s.skip_string(b"version")?;
s.consume_eq()?;
let quote = s.consume_quote()?;
let start = s.pos();
s.skip_string(b"1.")?;
s.skip_bytes(|_, c| c.is_xml_digit());
let ver = s.slice_back(start);
s.consume_byte(quote)?;
Ok(ver)
}
// EncodingDecl ::= S 'encoding' Eq ('"' EncName '"' | "'" EncName "'" )
// EncName ::= [A-Za-z] ([A-Za-z0-9._] | '-')*
fn parse_encoding_decl(s: &mut Stream<'a>) -> StreamResult<Option<StrSpan<'a>>> {
if !s.starts_with(b"encoding") {
return Ok(None);
}
s.advance(8);
s.consume_eq()?;
let quote = s.consume_quote()?;
// [A-Za-z] ([A-Za-z0-9._] | '-')*
// TODO: check that first byte is [A-Za-z]
let name = s.consume_bytes(|_, c| {
c.is_xml_letter()
|| c.is_xml_digit()
|| c == b'.'
|| c == b'-'
|| c == b'_'
});
s.consume_byte(quote)?;
Ok(Some(name))
}
// SDDecl ::= S 'standalone' Eq (("'" ('yes' | 'no') "'") | ('"' ('yes' | 'no') '"'))
fn parse_standalone(s: &mut Stream<'a>) -> StreamResult<Option<bool>> {
if !s.starts_with(b"standalone") {
return Ok(None);
}
s.advance(10);
s.consume_eq()?;
let quote = s.consume_quote()?;
let start = s.pos();
let value = s.consume_name()?.as_str();
let flag = match value {
"yes" => true,
"no" => false,
_ => {
let pos = s.gen_text_pos_from(start);
return Err(StreamError::InvalidString("yes', 'no", pos));
}
};
s.consume_byte(quote)?;
Ok(Some(flag))
}
fn parse_comment(s: &mut Stream<'a>) -> Result<Token<'a>> {
let start = s.pos();
Self::parse_comment_impl(s)
.map_err(|e| Error::InvalidComment(e, s.gen_text_pos_from(start)))
}
// '<!--' ((Char - '-') | ('-' (Char - '-')))* '-->'
fn parse_comment_impl(s: &mut Stream<'a>) -> StreamResult<Token<'a>> {
let start = s.pos();
s.advance(4);
let text = s.consume_chars(|s, c| !(c == '-' && s.starts_with(b"-->")))?;
s.skip_string(b"-->")?;
if text.as_str().contains("--") {
return Err(StreamError::InvalidCommentData);
}
if text.as_str().ends_with('-') {
return Err(StreamError::InvalidCommentEnd);
}
let span = s.slice_back(start);
Ok(Token::Comment { text, span })
}
fn parse_pi(s: &mut Stream<'a>) -> Result<Token<'a>> {
map_err_at!(Self::parse_pi_impl(s), s, InvalidPI)
}
// PI ::= '<?' PITarget (S (Char* - (Char* '?>' Char*)))? '?>'
// PITarget ::= Name - (('X' | 'x') ('M' | 'm') ('L' | 'l'))
fn parse_pi_impl(s: &mut Stream<'a>) -> StreamResult<Token<'a>> {
let start = s.pos();
s.advance(2);
let target = s.consume_name()?;
s.skip_spaces();
let content = s.consume_chars(|s, c| !(c == '?' && s.starts_with(b"?>")))?;
let content = if !content.is_empty() {
Some(content)
} else {
None
};
s.skip_string(b"?>")?;
let span = s.slice_back(start);
Ok(Token::ProcessingInstruction { target, content, span })
}
fn parse_doctype(s: &mut Stream<'a>) -> Result<Token<'a>> {
map_err_at!(Self::parse_doctype_impl(s), s, InvalidDoctype)
}
// doctypedecl ::= '<!DOCTYPE' S Name (S ExternalID)? S? ('[' intSubset ']' S?)? '>'
fn parse_doctype_impl(s: &mut Stream<'a>) -> StreamResult<Token<'a>> {
let start = s.pos();
s.advance(9);
s.consume_spaces()?;
let name = s.consume_name()?;
s.skip_spaces();
let external_id = Self::parse_external_id(s)?;
s.skip_spaces();
let c = s.curr_byte()?;
if c != b'[' && c != b'>' {
static EXPECTED: &[u8] = &[b'[', b'>'];
return Err(StreamError::InvalidCharMultiple(c, EXPECTED, s.gen_text_pos()));
}
s.advance(1);
let span = s.slice_back(start);
if c == b'[' {
Ok(Token::DtdStart { name, external_id, span })
} else {
Ok(Token::EmptyDtd { name, external_id, span })
}
}
// ExternalID ::= 'SYSTEM' S SystemLiteral | 'PUBLIC' S PubidLiteral S SystemLiteral
fn parse_external_id(s: &mut Stream<'a>) -> StreamResult<Option<ExternalId<'a>>> {
let v = if s.starts_with(b"SYSTEM") || s.starts_with(b"PUBLIC") {
let start = s.pos();
s.advance(6);
let id = s.slice_back(start);
s.consume_spaces()?;
let quote = s.consume_quote()?;
let literal1 = s.consume_bytes(|_, c| c != quote);
s.consume_byte(quote)?;
let v = if id.as_str() == "SYSTEM" {
ExternalId::System(literal1)
} else {
s.consume_spaces()?;
let quote = s.consume_quote()?;
let literal2 = s.consume_bytes(|_, c| c != quote);
s.consume_byte(quote)?;
ExternalId::Public(literal1, literal2)
};
Some(v)
} else {
None
};
Ok(v)
}
fn parse_entity_decl(s: &mut Stream<'a>) -> Result<Token<'a>> {
map_err_at!(Self::parse_entity_decl_impl(s), s, InvalidEntity)
}
// EntityDecl ::= GEDecl | PEDecl
// GEDecl ::= '<!ENTITY' S Name S EntityDef S? '>'
// PEDecl ::= '<!ENTITY' S '%' S Name S PEDef S? '>'
fn parse_entity_decl_impl(s: &mut Stream<'a>) -> StreamResult<Token<'a>> {
let start = s.pos();
s.advance(8);
s.consume_spaces()?;
let is_ge = if s.try_consume_byte(b'%') {
s.consume_spaces()?;
false
} else {
true
};
let name = s.consume_name()?;
s.consume_spaces()?;
let definition = Self::parse_entity_def(s, is_ge)?;
s.skip_spaces();
s.consume_byte(b'>')?;
let span = s.slice_back(start);
Ok(Token::EntityDeclaration { name, definition, span })
}
// EntityDef ::= EntityValue | (ExternalID NDataDecl?)
// PEDef ::= EntityValue | ExternalID
// EntityValue ::= '"' ([^%&"] | PEReference | Reference)* '"' | "'" ([^%&']
// | PEReference | Reference)* "'"
// ExternalID ::= 'SYSTEM' S SystemLiteral | 'PUBLIC' S PubidLiteral S SystemLiteral
// NDataDecl ::= S 'NDATA' S Name
fn parse_entity_def(s: &mut Stream<'a>, is_ge: bool) -> StreamResult<EntityDefinition<'a>> {
let c = s.curr_byte()?;
match c {
b'"' | b'\'' => {
let quote = s.consume_quote()?;
let value = s.consume_bytes(|_, c| c != quote);
s.consume_byte(quote)?;
Ok(EntityDefinition::EntityValue(value))
}
b'S' | b'P' => {
if let Some(id) = Self::parse_external_id(s)? {
if is_ge {
s.skip_spaces();
if s.starts_with(b"NDATA") {
s.advance(5);
s.consume_spaces()?;
s.skip_name()?;
// TODO: NDataDecl is not supported
}
}
Ok(EntityDefinition::ExternalId(id))
} else {
Err(StreamError::InvalidExternalID)
}
}
_ => {
static EXPECTED: &[u8] = &[b'"', b'\'', b'S', b'P'];
let pos = s.gen_text_pos();
Err(StreamError::InvalidCharMultiple(c, EXPECTED, pos))
}
}
}
fn consume_decl(s: &mut Stream) -> StreamResult<()> {
s.skip_bytes(|_, c| c != b'>');
s.consume_byte(b'>')?;
Ok(())
}
fn parse_cdata(s: &mut Stream<'a>) -> Result<Token<'a>> {
map_err_at!(Self::parse_cdata_impl(s), s, InvalidCdata)
}
// CDSect ::= CDStart CData CDEnd
// CDStart ::= '<![CDATA['
// CData ::= (Char* - (Char* ']]>' Char*))
// CDEnd ::= ']]>'
fn parse_cdata_impl(s: &mut Stream<'a>) -> StreamResult<Token<'a>> {
let start = s.pos();
s.advance(9);
let text = s.consume_chars(|s, c| !(c == ']' && s.starts_with(b"]]>")))?;
s.skip_string(b"]]>")?;
let span = s.slice_back(start);
Ok(Token::Cdata { text, span })
}
fn parse_element_start(s: &mut Stream<'a>) -> Result<Token<'a>> {
map_err_at!(Self::parse_element_start_impl(s), s, InvalidElement)
}
// '<' Name (S Attribute)* S? '>'
fn parse_element_start_impl(s: &mut Stream<'a>) -> StreamResult<Token<'a>> {
let start = s.pos();
s.advance(1);
let (prefix, local) = s.consume_qname()?;
let span = s.slice_back(start);
Ok(Token::ElementStart { prefix, local, span })
}
fn parse_close_element(s: &mut Stream<'a>) -> Result<Token<'a>> {
map_err_at!(Self::parse_close_element_impl(s), s, InvalidElement)
}
// '</' Name S? '>'
fn parse_close_element_impl(s: &mut Stream<'a>) -> StreamResult<Token<'a>> {
let start = s.pos();
s.advance(2);
let (prefix, tag_name) = s.consume_qname()?;
s.skip_spaces();
s.consume_byte(b'>')?;
let span = s.slice_back(start);
Ok(Token::ElementEnd { end: ElementEnd::Close(prefix, tag_name), span })
}
// Name Eq AttValue
fn parse_attribute(s: &mut Stream<'a>) -> StreamResult<Token<'a>> {
let attr_start = s.pos();
let has_space = s.starts_with_space();
s.skip_spaces();
if let Ok(c) = s.curr_byte() {
let start = s.pos();
match c {
b'/' => {
s.advance(1);
s.consume_byte(b'>')?;
let span = s.slice_back(start);
return Ok(Token::ElementEnd { end: ElementEnd::Empty, span });
}
b'>' => {
s.advance(1);
let span = s.slice_back(start);
return Ok(Token::ElementEnd { end: ElementEnd::Open, span });
}
_ => {}
}
}
if !has_space {
if !s.at_end() {
return Err(StreamError::InvalidSpace(
s.curr_byte_unchecked(), s.gen_text_pos_from(attr_start))
);
} else {
return Err(StreamError::UnexpectedEndOfStream);
}
}
let start = s.pos();
let (prefix, local) = s.consume_qname()?;
s.consume_eq()?;
let quote = s.consume_quote()?;
let quote_c = quote as char;
// The attribute value must not contain the < character.
let value = s.consume_chars(|_, c| c != quote_c && c != '<')?;
s.consume_byte(quote)?;
let span = s.slice_back(start);
Ok(Token::Attribute { prefix, local, value, span })
}
fn parse_text(s: &mut Stream<'a>) -> Result<Token<'a>> {
map_err_at!(Self::parse_text_impl(s), s, InvalidCharData)
}
fn parse_text_impl(s: &mut Stream<'a>) -> StreamResult<Token<'a>> {
let text = s.consume_chars(|_, c| c != '<')?;
// According to the spec, `]]>` must not appear inside a Text node.
// https://www.w3.org/TR/xml/#syntax
//
// Search for `>` first, since it's a bit faster than looking for `]]>`.
if text.as_str().contains('>') {
if text.as_str().contains("]]>") {
return Err(StreamError::InvalidCharacterData);
}
}
Ok(Token::Text { text })
}
}
impl<'a> Iterator for Tokenizer<'a> {
type Item = Result<Token<'a>>;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
if self.stream.at_end() || self.state == State::End {
return None;
}
let t = self.parse_next_impl();
if let Some(Err(_)) = t {
self.stream.jump_to_end();
self.state = State::End;
}
t
}
} | |
lib.rs | mod bench;
use gdnative::prelude::*;
fn init(handle: InitHandle) |
godot_gdnative_init!();
godot_nativescript_init!(init);
godot_gdnative_terminate!();
| {
handle.add_class::<bench::MyBench>();
} |
build.py | import os
import shutil
import subprocess
import sys
import glob
APK_NAME = "vulkanTexturearray"
SHADER_DIR = "texturearray"
ASSETS_TEXTURES = ["texturearray_bc3_unorm.ktx", "texturearray_astc_8x8_unorm.ktx", "texturearray_etc2_unorm.ktx"]
if subprocess.call("ndk-build", shell=True) == 0:
print("Build successful")
# Assets
if not os.path.exists("./assets"):
os.makedirs("./assets")
# Shaders
# Base
if not os.path.exists("./assets/shaders/base"):
os.makedirs("./assets/shaders/base")
for file in glob.glob("../../data/shaders/base/*.spv"): | shutil.copy(file, "./assets/shaders/base")
# Sample
if not os.path.exists("./assets/shaders/%s" % SHADER_DIR):
os.makedirs("./assets/shaders/%s" % SHADER_DIR)
for file in glob.glob("../../data/shaders/%s/*.spv" %SHADER_DIR):
shutil.copy(file, "./assets/shaders/%s" % SHADER_DIR)
# Textures
if not os.path.exists("./assets/textures"):
os.makedirs("./assets/textures")
for file in ASSETS_TEXTURES:
shutil.copy("../../data/textures/%s" % file, "./assets/textures")
# Icon
if not os.path.exists("./res/drawable"):
os.makedirs("./res/drawable")
shutil.copy("../../android/images/icon.png", "./res/drawable")
if subprocess.call("ant debug -Dout.final.file=%s.apk" % APK_NAME, shell=True) == 0:
if len(sys.argv) > 1:
if sys.argv[1] == "-deploy":
if subprocess.call("adb install -r %s.apk" % APK_NAME, shell=True) != 0:
print("Could not deploy to device!")
else:
print("Error during build process!")
else:
print("Error building project!") | |
drv_xmlproc.py | """
A SAX driver for xmlproc
$Id: drv_xmlproc.py,v 1.9 1999/10/15 07:55:33 larsga Exp $
"""
version="0.95"
from xml.sax import saxlib,saxutils,saxmisc
from xml.parsers.xmlproc import xmlproc
import os
pre_parse_properties={"http://xml.org/sax/properties/namespace-sep":1,
"http://xml.org/sax/handlers/DeclHandler":1,
"http://xml.org/sax/handlers/LexicalHandler":1,
"http://xml.org/sax/handlers/NamespaceHandler":1}
# Todo:
# - must actually use catalog file
# - document interplay between reset and SAX2
# - fix bugs:
# - startDTD must be called
# - do namespace processing, if it is requested
# - support more features and properties
XCATALOG =1
SOCATALOG=2
# --- SAX_XPParser
class SAX_XPParser(saxlib.Parser,xmlproc.Application,xmlproc.DTDConsumer,
xmlproc.ErrorHandler,xmlproc.PubIdResolver):
def __init__(self):
saxlib.Parser.__init__(self)
self.reset()
self.declHandler=saxmisc.DeclHandler()
self.lexicalHandler=saxmisc.LexicalHandler()
self.namespaceHandler=saxmisc.NamespaceHandler()
self.ns_separator=" "
self.locator=1
self.is_parsing=0
self.stop_on_error=1
self.catalog_file=None
self.catalog_type=None
def parse(self,sysID):
self.reset()
try:
self.is_parsing=1
self.parser.parse_resource(sysID)
finally:
self.is_parsing=0
def parseFile(self,file):
self.reset()
try:
self.is_parsing=1
self.parser.read_from(file)
self.parser.flush()
self.parser.parseEnd()
finally:
self.is_parsing=0
def _create_parser(self):
return xmlproc.XMLProcessor()
def setLocale(self, locale):
|
# --- data event methods
def doc_start(self):
if self.locator:
self.doc_handler.setDocumentLocator(self)
self.doc_handler.startDocument()
def doc_end(self):
self.doc_handler.endDocument()
def handle_data(self,data,start,end):
self.doc_handler.characters(data,start,end-start)
def handle_ignorable_data(self,data,start,end):
self.doc_handler.ignorableWhitespace(data,start,end-start)
def handle_pi(self, target, data):
self.doc_handler.processingInstruction(target,data)
def handle_start_tag(self, name, attrs):
self.doc_handler.startElement(name,saxutils.AttributeMap(attrs))
def handle_end_tag(self, name):
self.doc_handler.endElement(name)
def handle_comment(self,content):
self.lexicalHandler.comment(content,0,len(content))
# --- pubid resolution
def resolve_entity_pubid(self,pubid,sysid):
return self.ent_handler.resolveEntity(pubid,sysid)
def resolve_doctype_pubid(self,pubid,sysid):
return self.ent_handler.resolveEntity(pubid,sysid)
# --- error handling
def warning(self,msg):
self.err_handler.warning(saxlib.SAXParseException(msg,None,self))
def error(self,msg):
self.err_handler.error(saxlib.SAXParseException(msg,None,self))
def fatal(self,msg):
self.err_handler.fatalError(saxlib.SAXParseException(msg,None,self))
# --- location handling
def getColumnNumber(self):
return self.parser.get_column()
def getLineNumber(self):
return self.parser.get_line()
def getSystemId(self):
return self.parser.get_current_sysid()
# --- DTD parsing
def new_external_entity(self,name,pubid,sysid,ndata):
if ndata!="":
self.dtd_handler.unparsedEntityDecl(name,pubid,sysid,ndata)
else:
# FIXME: ensure that only first decl is passed on
self.declHandler.externalEntityDecl(name,pubid,sysid)
def new_notation(self,name,pubid,sysid):
self.dtd_handler.notationDecl(name,pubid,sysid)
def dtd_start(self):
self.lexicalHandler.startDTD("","","")
def dtd_end(self):
self.lexicalHandler.endDTD()
def new_general_entity(self,name,val):
# FIXME: ensure that only first decl is passed on
self.declHandler.internalEntityDecl(name,val)
def new_element_type(self,elem_name,elem_cont):
# FIXME: only first
self.declHandler.elementDecl(elem_name,elem_cont)
def new_attribute(self,elem,attr,a_type,a_decl,a_def):
# FIXME: only first
if a_decl=="#DEFAULT": a_decl=None
self.declHandler.attributeDecl(elem,attr,a_type,a_decl,a_def)
# --- entity events
def resolve_entity(self,pubid,sysid):
newsysid=self.ent_handler.resolveEntity(pubid,sysid)
if newsysid==None:
return sysid
else:
return newsysid
# --- EXPERIMENTAL PYTHON SAX EXTENSIONS:
def get_parser_name(self):
return "xmlproc"
def get_parser_version(self):
return xmlproc.version
def get_driver_version(self):
return version
def is_validating(self):
return 0
def is_dtd_reading(self):
return 1
def reset(self):
if hasattr(self, "parser"):
self.parser.deref()
self.parser=self._create_parser()
self.parser.set_application(self)
self.parser.set_dtd_listener(self) # FIXME: Should we always do this?
self.parser.set_error_handler(self)
self.parser.set_pubid_resolver(self)
self.parser.reset()
def feed(self,data):
self.parser.feed(data)
def close(self):
self.parser.close()
self.parser.deref()
# Dereferencing to avoid circular references (grrrr)
self.err_handler = self.dtd_handler = self.doc_handler = None
self.parser = self.locator = self.ent_handler = None
# --- Configurable methods
def getFeature(self, featureId):
if featureId=="http://xml.org/sax/features/use-locator":
return self.locator
elif featureId=="http://xml.org/sax/features/validation":
return 0
elif featureId=="http://garshol.priv.no/sax/stop-on-error":
return self.stop_on_error
elif featureId=="http://garshol.priv.no/sax/use-catalog":
return self.catalog_file
elif featureId=="http://xml.org/sax/features/external-general-entities" or \
featureId=="http://xml.org/sax/features/external-parameter-entities" or \
featureId=="http://xml.org/sax/features/namespaces" or \
featureId=="http://xml.org/sax/features/normalize-text":
raise saxlib.SAXNotSupportedException("Feature %s not supported" %
featureId)
else:
raise saxlib.SAXNotRecognizedException("Feature %s not recognized"
% featureId)
def setFeature(self, featureId, state):
if featureId=="http://xml.org/sax/features/use-locator":
self.locator=state
elif featureId=="http://garshol.priv.no/sax/stop-on-error":
self.stop_on_error=state
self.parser.set_data_after_wf_error(state)
elif featureId=="http://garshol.priv.no/sax/use-catalog":
if state:
if os.environ.has_key("XMLXCATALOG"):
self.catalog_file=os.environ["XMLXCATALOG"]
self.catalog_type=XCATALOG
elif os.environ.has_key("XMLSOCATALOG"):
self.catalog_file=os.environ["XMLSOCATALOG"]
self.catalog_type=SOCATALOG
else:
raise saxlib.SAXException("Neither XMLXCATALOG nor "
"XMLSOCATALOG variables set")
elif featureId=="http://xml.org/sax/features/validation" or \
featureId=="http://xml.org/sax/features/external-general-entities" or \
featureId=="http://xml.org/sax/features/external-parameter-entities" or \
featureId=="http://xml.org/sax/features/namespaces" or \
featureId=="http://xml.org/sax/features/normalize-text":
raise saxlib.SAXNotSupportedException("Feature %s not supported" %
featureId)
else:
raise saxlib.SAXNotRecognizedException("Feature %s not recognized"
% featureId)
def getProperty(self, propertyId):
if propertyId=="http://xml.org/sax/properties/namespace-sep":
return self.ns_separator
elif propertyId=="http://xml.org/sax/handlers/DeclHandler":
return self.declHandler
elif propertyId=="http://xml.org/sax/handlers/LexicalHandler":
return self.lexicalHandler
elif propertyId=="http://xml.org/sax/handlers/NamespaceHandler":
return self.namespaceHandler
elif propertyId=="http://xml.org/sax/properties/dom-node" or \
propertyId=="http://xml.org/sax/properties/xml-string":
raise saxlib.SAXNotSupportedException("Property %s not supported" %
propertyId)
elif propertyId=="http://garshol.priv.no/sax/xmlproc/dtdobj":
return self.parser.get_dtd()
elif propertyId=="http://garshol.priv.no/sax/catalog-file":
return self.catalog_file
else:
raise saxlib.SAXNotRecognizedException("Property %s not recognized"
% propertyId)
def setProperty(self, propertyId, value):
if pre_parse_properties.has_key(propertyId) and self.is_parsing:
raise saxlib.SAXNotSupportedException("Not allowed to set "
"property %s during parsing"
% propertyId)
if propertyId=="http://xml.org/sax/properties/namespace-sep":
self.ns_separator=value
elif propertyId=="http://xml.org/sax/handlers/DeclHandler":
self.declHandler=value
elif propertyId=="http://xml.org/sax/handlers/LexicalHandler":
self.lexicalHandler=value
elif propertyId=="http://xml.org/sax/handlers/NamespaceHandler":
self.namespaceHandler=value
elif propertyId=="http://garshol.priv.no/sax/catalog-file":
self.catalog_file=value
elif propertyId=="http://xml.org/sax/properties/dom-node" or \
propertyId=="http://garshol.priv.no/sax/xmlproc/dtdobj" or \
propertyId=="http://xml.org/sax/properties/xml-string":
raise saxlib.SAXNotSupportedException("Property is read-only")
else:
raise saxlib.SAXNotRecognizedException("Property %s not recognized"
% propertyId)
# --- Global functions
def create_parser():
return SAX_XPParser()
| try:
self.parser.set_error_language(locale)
except KeyError:
raise saxlib.SAXNotSupportedException("Locale '%s' not supported" % locale) |
Error.ts | export class | extends Error {
public userMessage?: string
constructor (message: string, userMessage?: string) {
super(message)
this.userMessage = userMessage
}
public static New (message: string, userMessage?: string) {
return new AttError(message, userMessage)
}
}
| AttError |
stakes.rs | //! Stakes serve as a cache of stake and vote accounts to derive
//! node stakes
use solana_sdk::account::Account;
use solana_sdk::clock::Epoch;
use solana_sdk::pubkey::Pubkey;
use solana_sdk::sysvar::stake_history::StakeHistory;
use solana_stake_api::stake_state::{new_stake_history_entry, StakeState};
use solana_vote_api::vote_state::VoteState;
use std::collections::HashMap;
#[derive(Default, Clone, PartialEq, Debug, Deserialize, Serialize)]
pub struct Stakes {
/// vote accounts
vote_accounts: HashMap<Pubkey, (u64, Account)>,
/// stake_accounts
stake_accounts: HashMap<Pubkey, Account>,
/// unclaimed points.
// a point is a credit multiplied by the stake
points: u64,
/// current epoch, used to calculate current stake
epoch: Epoch,
/// history of staking levels
stake_history: StakeHistory,
}
impl Stakes {
pub fn history(&self) -> &StakeHistory {
&self.stake_history
}
pub fn clone_with_epoch(&self, epoch: Epoch) -> Self {
if self.epoch == epoch {
self.clone()
} else {
let mut stake_history = self.stake_history.clone();
stake_history.add(
self.epoch,
new_stake_history_entry(
self.epoch,
self.stake_accounts
.iter()
.filter_map(|(_pubkey, stake_account)| {
StakeState::stake_from(stake_account)
})
.collect::<Vec<_>>()
.iter(),
Some(&self.stake_history),
),
);
Stakes {
stake_accounts: self.stake_accounts.clone(),
points: self.points,
epoch,
vote_accounts: self
.vote_accounts
.iter()
.map(|(pubkey, (_stake, account))| {
( | account.clone(),
),
)
})
.collect(),
stake_history,
}
}
}
// sum the stakes that point to the given voter_pubkey
fn calculate_stake(
&self,
voter_pubkey: &Pubkey,
epoch: Epoch,
stake_history: Option<&StakeHistory>,
) -> u64 {
self.stake_accounts
.iter()
.map(|(_, stake_account)| {
StakeState::stake_from(stake_account).map_or(0, |stake| {
if &stake.voter_pubkey == voter_pubkey {
stake.stake(epoch, stake_history)
} else {
0
}
})
})
.sum()
}
pub fn is_stake(account: &Account) -> bool {
solana_vote_api::check_id(&account.owner)
|| solana_stake_api::check_id(&account.owner)
&& account.data.len() >= std::mem::size_of::<StakeState>()
}
pub fn store(&mut self, pubkey: &Pubkey, account: &Account) {
if solana_vote_api::check_id(&account.owner) {
if account.lamports == 0 {
self.vote_accounts.remove(pubkey);
} else {
let old = self.vote_accounts.get(pubkey);
let stake = old.map_or_else(
|| self.calculate_stake(pubkey, self.epoch, Some(&self.stake_history)),
|v| v.0,
);
// count any increase in points, can only go forward
let old_credits = old
.and_then(|(_stake, old_account)| VoteState::credits_from(old_account))
.unwrap_or(0);
let credits = VoteState::credits_from(account).unwrap_or(old_credits);
self.points += credits.saturating_sub(old_credits) * stake;
self.vote_accounts.insert(*pubkey, (stake, account.clone()));
}
} else if solana_stake_api::check_id(&account.owner) {
// old_stake is stake lamports and voter_pubkey from the pre-store() version
let old_stake = self.stake_accounts.get(pubkey).and_then(|old_account| {
StakeState::stake_from(old_account).map(|stake| {
(
stake.voter_pubkey,
stake.stake(self.epoch, Some(&self.stake_history)),
)
})
});
let stake = StakeState::stake_from(account).map(|stake| {
(
stake.voter_pubkey,
if account.lamports != 0 {
stake.stake(self.epoch, Some(&self.stake_history))
} else {
0
},
)
});
// if adjustments need to be made...
if stake != old_stake {
if let Some((voter_pubkey, stake)) = old_stake {
self.vote_accounts
.entry(voter_pubkey)
.and_modify(|e| e.0 -= stake);
}
if let Some((voter_pubkey, stake)) = stake {
self.vote_accounts
.entry(voter_pubkey)
.and_modify(|e| e.0 += stake);
}
}
if account.lamports == 0 {
self.stake_accounts.remove(pubkey);
} else {
self.stake_accounts.insert(*pubkey, account.clone());
}
}
}
pub fn vote_accounts(&self) -> &HashMap<Pubkey, (u64, Account)> {
&self.vote_accounts
}
pub fn stake_accounts(&self) -> &HashMap<Pubkey, Account> {
&self.stake_accounts
}
pub fn rewards_pools(&self) -> impl Iterator<Item = (&Pubkey, &Account)> {
self.stake_accounts
.iter()
.filter(|(_key, account)| match StakeState::from(account) {
Some(StakeState::RewardsPool) => true,
_ => false,
})
}
pub fn highest_staked_node(&self) -> Option<Pubkey> {
self.vote_accounts
.iter()
.max_by(|(_ak, av), (_bk, bv)| av.0.cmp(&bv.0))
.and_then(|(_k, (_stake, account))| VoteState::from(account))
.map(|vote_state| vote_state.node_pubkey)
}
/// currently unclaimed points
pub fn points(&self) -> u64 {
self.points
}
/// "claims" points, resets points to 0
pub fn claim_points(&mut self) -> u64 {
let points = self.points;
self.points = 0;
points
}
}
#[cfg(test)]
pub mod tests {
use super::*;
use solana_sdk::pubkey::Pubkey;
use solana_stake_api::stake_state;
use solana_vote_api::vote_state::{self, VoteState, MAX_LOCKOUT_HISTORY};
// set up some dummies for a staked node (( vote ) ( stake ))
pub fn create_staked_node_accounts(stake: u64) -> ((Pubkey, Account), (Pubkey, Account)) {
let vote_pubkey = Pubkey::new_rand();
let vote_account = vote_state::create_account(&vote_pubkey, &Pubkey::new_rand(), 0, 1);
(
(vote_pubkey, vote_account),
create_stake_account(stake, &vote_pubkey),
)
}
// add stake to a vote_pubkey ( stake )
pub fn create_stake_account(stake: u64, vote_pubkey: &Pubkey) -> (Pubkey, Account) {
let stake_pubkey = Pubkey::new_rand();
(
stake_pubkey,
stake_state::create_account(
&stake_pubkey,
&vote_pubkey,
&vote_state::create_account(&vote_pubkey, &Pubkey::new_rand(), 0, 1),
stake,
),
)
}
#[test]
fn test_stakes_basic() {
for i in 0..4 {
let mut stakes = Stakes::default();
stakes.epoch = i;
let ((vote_pubkey, vote_account), (stake_pubkey, mut stake_account)) =
create_staked_node_accounts(10);
stakes.store(&vote_pubkey, &vote_account);
stakes.store(&stake_pubkey, &stake_account);
let stake = StakeState::stake_from(&stake_account).unwrap();
{
let vote_accounts = stakes.vote_accounts();
assert!(vote_accounts.get(&vote_pubkey).is_some());
assert_eq!(
vote_accounts.get(&vote_pubkey).unwrap().0,
stake.stake(i, None)
);
}
stake_account.lamports = 42;
stakes.store(&stake_pubkey, &stake_account);
{
let vote_accounts = stakes.vote_accounts();
assert!(vote_accounts.get(&vote_pubkey).is_some());
assert_eq!(
vote_accounts.get(&vote_pubkey).unwrap().0,
stake.stake(i, None)
); // stays old stake, because only 10 is activated
}
// activate more
let (_stake_pubkey, mut stake_account) = create_stake_account(42, &vote_pubkey);
stakes.store(&stake_pubkey, &stake_account);
let stake = StakeState::stake_from(&stake_account).unwrap();
{
let vote_accounts = stakes.vote_accounts();
assert!(vote_accounts.get(&vote_pubkey).is_some());
assert_eq!(
vote_accounts.get(&vote_pubkey).unwrap().0,
stake.stake(i, None)
); // now stake of 42 is activated
}
stake_account.lamports = 0;
stakes.store(&stake_pubkey, &stake_account);
{
let vote_accounts = stakes.vote_accounts();
assert!(vote_accounts.get(&vote_pubkey).is_some());
assert_eq!(vote_accounts.get(&vote_pubkey).unwrap().0, 0);
}
}
}
#[test]
fn test_stakes_highest() {
let mut stakes = Stakes::default();
assert_eq!(stakes.highest_staked_node(), None);
let ((vote_pubkey, vote_account), (stake_pubkey, stake_account)) =
create_staked_node_accounts(10);
stakes.store(&vote_pubkey, &vote_account);
stakes.store(&stake_pubkey, &stake_account);
let ((vote11_pubkey, vote11_account), (stake11_pubkey, stake11_account)) =
create_staked_node_accounts(20);
stakes.store(&vote11_pubkey, &vote11_account);
stakes.store(&stake11_pubkey, &stake11_account);
let vote11_node_pubkey = VoteState::from(&vote11_account).unwrap().node_pubkey;
assert_eq!(stakes.highest_staked_node(), Some(vote11_node_pubkey))
}
#[test]
fn test_stakes_points() {
let mut stakes = Stakes::default();
stakes.epoch = 4;
let stake = 42;
assert_eq!(stakes.points(), 0);
assert_eq!(stakes.claim_points(), 0);
assert_eq!(stakes.claim_points(), 0);
let ((vote_pubkey, mut vote_account), (stake_pubkey, stake_account)) =
create_staked_node_accounts(stake);
stakes.store(&vote_pubkey, &vote_account);
stakes.store(&stake_pubkey, &stake_account);
assert_eq!(stakes.points(), 0);
assert_eq!(stakes.claim_points(), 0);
let mut vote_state = VoteState::from(&vote_account).unwrap();
for i in 0..MAX_LOCKOUT_HISTORY + 42 {
vote_state.process_slot_vote_unchecked(i as u64);
vote_state.to(&mut vote_account).unwrap();
stakes.store(&vote_pubkey, &vote_account);
assert_eq!(stakes.points(), vote_state.credits() * stake);
}
vote_account.lamports = 0;
stakes.store(&vote_pubkey, &vote_account);
assert_eq!(stakes.points(), vote_state.credits() * stake);
assert_eq!(stakes.claim_points(), vote_state.credits() * stake);
assert_eq!(stakes.claim_points(), 0);
assert_eq!(stakes.claim_points(), 0);
// points come out of nowhere, but don't care here ;)
vote_account.lamports = 1;
stakes.store(&vote_pubkey, &vote_account);
assert_eq!(stakes.points(), vote_state.credits() * stake);
// test going backwards, should never go backwards
let old_vote_state = vote_state;
let vote_account = vote_state::create_account(&vote_pubkey, &Pubkey::new_rand(), 0, 1);
stakes.store(&vote_pubkey, &vote_account);
assert_eq!(stakes.points(), old_vote_state.credits() * stake);
}
#[test]
fn test_stakes_vote_account_disappear_reappear() {
let mut stakes = Stakes::default();
stakes.epoch = 4;
let ((vote_pubkey, mut vote_account), (stake_pubkey, stake_account)) =
create_staked_node_accounts(10);
stakes.store(&vote_pubkey, &vote_account);
stakes.store(&stake_pubkey, &stake_account);
{
let vote_accounts = stakes.vote_accounts();
assert!(vote_accounts.get(&vote_pubkey).is_some());
assert_eq!(vote_accounts.get(&vote_pubkey).unwrap().0, 10);
}
vote_account.lamports = 0;
stakes.store(&vote_pubkey, &vote_account);
{
let vote_accounts = stakes.vote_accounts();
assert!(vote_accounts.get(&vote_pubkey).is_none());
}
vote_account.lamports = 1;
stakes.store(&vote_pubkey, &vote_account);
{
let vote_accounts = stakes.vote_accounts();
assert!(vote_accounts.get(&vote_pubkey).is_some());
assert_eq!(vote_accounts.get(&vote_pubkey).unwrap().0, 10);
}
}
#[test]
fn test_stakes_change_delegate() {
let mut stakes = Stakes::default();
stakes.epoch = 4;
let ((vote_pubkey, vote_account), (stake_pubkey, stake_account)) =
create_staked_node_accounts(10);
let ((vote_pubkey2, vote_account2), (_stake_pubkey2, stake_account2)) =
create_staked_node_accounts(10);
stakes.store(&vote_pubkey, &vote_account);
stakes.store(&vote_pubkey2, &vote_account2);
// delegates to vote_pubkey
stakes.store(&stake_pubkey, &stake_account);
let stake = StakeState::stake_from(&stake_account).unwrap();
{
let vote_accounts = stakes.vote_accounts();
assert!(vote_accounts.get(&vote_pubkey).is_some());
assert_eq!(
vote_accounts.get(&vote_pubkey).unwrap().0,
stake.stake(stakes.epoch, Some(&stakes.stake_history))
);
assert!(vote_accounts.get(&vote_pubkey2).is_some());
assert_eq!(vote_accounts.get(&vote_pubkey2).unwrap().0, 0);
}
// delegates to vote_pubkey2
stakes.store(&stake_pubkey, &stake_account2);
{
let vote_accounts = stakes.vote_accounts();
assert!(vote_accounts.get(&vote_pubkey).is_some());
assert_eq!(vote_accounts.get(&vote_pubkey).unwrap().0, 0);
assert!(vote_accounts.get(&vote_pubkey2).is_some());
assert_eq!(
vote_accounts.get(&vote_pubkey2).unwrap().0,
stake.stake(stakes.epoch, Some(&stakes.stake_history))
);
}
}
#[test]
fn test_stakes_multiple_stakers() {
let mut stakes = Stakes::default();
stakes.epoch = 4;
let ((vote_pubkey, vote_account), (stake_pubkey, stake_account)) =
create_staked_node_accounts(10);
let (stake_pubkey2, stake_account2) = create_stake_account(10, &vote_pubkey);
stakes.store(&vote_pubkey, &vote_account);
// delegates to vote_pubkey
stakes.store(&stake_pubkey, &stake_account);
stakes.store(&stake_pubkey2, &stake_account2);
{
let vote_accounts = stakes.vote_accounts();
assert!(vote_accounts.get(&vote_pubkey).is_some());
assert_eq!(vote_accounts.get(&vote_pubkey).unwrap().0, 20);
}
}
#[test]
fn test_clone_with_epoch() {
let mut stakes = Stakes::default();
let ((vote_pubkey, vote_account), (stake_pubkey, stake_account)) =
create_staked_node_accounts(10);
stakes.store(&vote_pubkey, &vote_account);
stakes.store(&stake_pubkey, &stake_account);
let stake = StakeState::stake_from(&stake_account).unwrap();
{
let vote_accounts = stakes.vote_accounts();
assert_eq!(
vote_accounts.get(&vote_pubkey).unwrap().0,
stake.stake(stakes.epoch, Some(&stakes.stake_history))
);
}
let stakes = stakes.clone_with_epoch(3);
{
let vote_accounts = stakes.vote_accounts();
assert_eq!(
vote_accounts.get(&vote_pubkey).unwrap().0,
stake.stake(stakes.epoch, Some(&stakes.stake_history))
);
}
}
#[test]
fn test_stakes_not_delegate() {
let mut stakes = Stakes::default();
stakes.epoch = 4;
let ((vote_pubkey, vote_account), (stake_pubkey, stake_account)) =
create_staked_node_accounts(10);
stakes.store(&vote_pubkey, &vote_account);
stakes.store(&stake_pubkey, &stake_account);
{
let vote_accounts = stakes.vote_accounts();
assert!(vote_accounts.get(&vote_pubkey).is_some());
assert_eq!(vote_accounts.get(&vote_pubkey).unwrap().0, 10);
}
// not a stake account, and whacks above entry
stakes.store(&stake_pubkey, &Account::new(1, 0, &solana_stake_api::id()));
{
let vote_accounts = stakes.vote_accounts();
assert!(vote_accounts.get(&vote_pubkey).is_some());
assert_eq!(vote_accounts.get(&vote_pubkey).unwrap().0, 0);
}
}
} | *pubkey,
(
self.calculate_stake(pubkey, epoch, Some(&stake_history)), |
fcntl.rs | use super::flock::flock;
use super::*;
use util::mem_util::from_user;
#[derive(Debug)]
pub enum FcntlCmd<'a> {
/// Duplicate the file descriptor fd using the lowest-numbered available
/// file descriptor greater than or equal to arg.
DupFd(FileDesc),
/// As for `DupFd`, but additionally set the close-on-exec flag for the
/// duplicate file descriptor.
DupFdCloexec(FileDesc),
/// Return (as the function result) the file descriptor flags
GetFd(),
/// Set the file descriptor to be close-on-exec or not
SetFd(u32),
/// Get the file status flags
GetFl(),
/// Set the file status flags
SetFl(u32),
/// Test a file lock
GetLk(&'a mut flock),
/// Acquire or release a file lock
SetLk(&'a flock),
}
impl<'a> FcntlCmd<'a> {
#[deny(unreachable_patterns)]
pub fn from_raw(cmd: u32, arg: u64) -> Result<FcntlCmd<'a>> {
Ok(match cmd as c_int {
libc::F_DUPFD => FcntlCmd::DupFd(arg as FileDesc),
libc::F_DUPFD_CLOEXEC => FcntlCmd::DupFdCloexec(arg as FileDesc),
libc::F_GETFD => FcntlCmd::GetFd(),
libc::F_SETFD => FcntlCmd::SetFd(arg as u32),
libc::F_GETFL => FcntlCmd::GetFl(),
libc::F_SETFL => FcntlCmd::SetFl(arg as u32),
libc::F_GETLK => {
let flock_mut_ptr = arg as *mut flock;
from_user::check_mut_ptr(flock_mut_ptr)?;
let flock_mut_c = unsafe { &mut *flock_mut_ptr };
FcntlCmd::GetLk(flock_mut_c)
}
libc::F_SETLK => {
let flock_ptr = arg as *const flock;
from_user::check_ptr(flock_ptr)?;
let flock_c = unsafe { &*flock_ptr };
FcntlCmd::SetLk(flock_c)
}
_ => return_errno!(EINVAL, "unsupported command"),
})
}
}
pub fn | (fd: FileDesc, cmd: &mut FcntlCmd) -> Result<isize> {
println!("fcntl: fd: {:?}, cmd: {:?}", &fd, cmd);
let current = current!();
let mut file_table = current.files().lock().unwrap();
let ret = match cmd {
FcntlCmd::DupFd(min_fd) => {
let dup_fd = file_table.dup(fd, *min_fd, false)?;
dup_fd as isize
}
FcntlCmd::DupFdCloexec(min_fd) => {
let dup_fd = file_table.dup(fd, *min_fd, true)?;
dup_fd as isize
}
FcntlCmd::GetFd() => {
let entry = file_table.get_entry(fd)?;
let fd_flags = if entry.is_close_on_spawn() {
libc::FD_CLOEXEC
} else {
0
};
fd_flags as isize
}
FcntlCmd::SetFd(fd_flags) => {
let entry = file_table.get_entry_mut(fd)?;
entry.set_close_on_spawn((*fd_flags & libc::FD_CLOEXEC as u32) != 0);
0
}
FcntlCmd::GetFl() => {
let file = file_table.get(fd)?;
let status_flags = file.get_status_flags()?;
let access_mode = file.get_access_mode()?;
(status_flags.bits() | access_mode as u32) as isize
}
FcntlCmd::SetFl(flags) => {
let file = file_table.get(fd)?;
let status_flags = StatusFlags::from_bits_truncate(*flags);
file.set_status_flags(status_flags)?;
0
}
FcntlCmd::GetLk(flock_mut_c) => {
let file = file_table.get(fd)?;
let mut lock = Flock::from_c(*flock_mut_c)?;
if let FlockType::F_UNLCK = lock.l_type {
return_errno!(EINVAL, "invalid flock type for getlk");
}
file.test_advisory_lock(&mut lock)?;
(*flock_mut_c).copy_from_safe(&lock);
0
}
FcntlCmd::SetLk(flock_c) => {
let file = file_table.get(fd)?;
let lock = Flock::from_c(*flock_c)?;
file.set_advisory_lock(&lock)?;
0
}
};
Ok(ret)
}
| do_fcntl |
app-routing.module.ts | import {NgModule} from '@angular/core';
import {Routes, RouterModule} from '@angular/router';
import {MainComponent} from '@modules/main/main.component';
import {BlankComponent} from '@pages/blank/blank.component';
import {LoginComponent} from '@modules/login/login.component';
import {ProfileComponent} from '@pages/profile/profile.component';
import {RegisterComponent} from '@modules/register/register.component';
import {DashboardComponent} from '@pages/dashboard/dashboard.component';
import {AuthGuard} from '@guards/auth.guard';
import {NonAuthGuard} from '@guards/non-auth.guard';
import {ForgotPasswordComponent} from '@modules/forgot-password/forgot-password.component';
import {RecoverPasswordComponent} from '@modules/recover-password/recover-password.component';
import {PrivacyPolicyComponent} from '@modules/privacy-policy/privacy-policy.component';
import { EnrollCertificateComponent } from '@pages/enroll-certificate/enroll-certificate.component';
import { RequestListComponent } from '@pages/request-list/request-list.component';
import { AdminCertRequestDetailComponent } from '@pages/admin-cert-request/admin-cert-request-detail.component';
import { ListCertificateUserComponent } from '@pages/list-certificate-user/list-certificate-user.component';
import { ListCertificateAdminComponent } from '@pages/list-certificate-admin/list-certificate-admin.component';
import { ChangePasswordComponent } from '@pages/change-password/change-password.component';
const routes: Routes = [
{
path: '',
component: MainComponent,
canActivate: [AuthGuard],
canActivateChild: [AuthGuard],
children: [
{
path: 'profile',
component: ProfileComponent
},
{
path: 'blank',
component: BlankComponent
},
{
path: '',
component: DashboardComponent
},
{
path: 'add-certificate',
component: EnrollCertificateComponent
},
{
path: 'request-list',
component: RequestListComponent
},
{
path: 'request-list-detail-admin',
component: AdminCertRequestDetailComponent
},
{
path: 'user-certificate',
component: ListCertificateUserComponent
},
{
path: 'all-user-certificate',
component: ListCertificateAdminComponent
},
{
path: 'change-password',
component: ChangePasswordComponent
},
]
},
{
path: 'login',
component: LoginComponent,
canActivate: [NonAuthGuard]
},
{
path: 'register',
component: RegisterComponent,
canActivate: [NonAuthGuard]
},
{
path: 'forgot-password',
component: ForgotPasswordComponent,
canActivate: [NonAuthGuard]
},
{
path: 'recover-password',
component: RecoverPasswordComponent,
canActivate: [NonAuthGuard]
},
{
path: 'privacy-policy',
component: PrivacyPolicyComponent,
canActivate: [NonAuthGuard]
},
{path: '**', redirectTo: ''}
];
@NgModule({
imports: [RouterModule.forRoot(routes, {relativeLinkResolution: 'legacy'})],
exports: [RouterModule]
})
export class | {}
| AppRoutingModule |
timer.py | """Timer class based on the timeit.Timer class, but torch aware."""
import enum
import timeit
import textwrap
from typing import Any, Callable, Dict, List, NoReturn, Optional, Type, Union
import numpy as np
import torch
from torch.utils.benchmark.utils import common, cpp_jit
from torch.utils.benchmark.utils._stubs import TimerClass, TimeitModuleType
from torch.utils.benchmark.utils.valgrind_wrapper import timer_interface as valgrind_timer_interface
__all__ = ["Timer", "timer", "Language"]
if torch.has_cuda and torch.cuda.is_available():
def timer() -> float:
torch.cuda.synchronize()
return timeit.default_timer()
else:
timer = timeit.default_timer
class Language(enum.Enum):
PYTHON = 0
CPP = 1
class CPPTimer:
def __init__(
self,
stmt: str,
setup: str,
timer: Callable[[], float],
globals: Dict[str, Any],
) -> None:
if timer is not timeit.default_timer:
raise NotImplementedError(
"PyTorch was built with CUDA and a GPU is present; however "
"Timer does not yet support GPU measurements. If your "
"code is CPU only, pass `timer=timeit.default_timer` to the "
"Timer's constructor to indicate this. (Note that this will "
"produce incorrect results if the GPU is in fact used, as "
"Timer will not synchronize CUDA.)"
)
if globals:
raise ValueError("C++ timing does not support globals.")
self._stmt: str = textwrap.dedent(stmt)
self._setup: str = textwrap.dedent(setup)
self._timeit_module: Optional[TimeitModuleType] = None
def timeit(self, number: int) -> float:
if self._timeit_module is None:
self._timeit_module = cpp_jit.compile_timeit_template(
self._stmt,
self._setup,
)
return self._timeit_module.timeit(number)
class Timer(object):
"""Helper class for measuring execution time of PyTorch statements.
For a full tutorial on how to use this class, see:
https://pytorch.org/tutorials/recipes/recipes/benchmark.html
The PyTorch Timer is based on `timeit.Timer` (and in fact uses
`timeit.Timer` internally), but with several key differences:
1) Runtime aware:
Timer will perform warmups (important as some elements of PyTorch are
lazily initialized), set threadpool size so that comparisons are
apples-to-apples, and synchronize asynchronous CUDA functions when
necessary.
2) Focus on replicates:
When measuring code, and particularly complex kernels / models,
run-to-run variation is a significant confounding factor. It is
expected that all measurements should include replicates to quantify
noise and allow median computation, which is more robust than mean.
To that effect, this class deviates from the `timeit` API by
conceptually merging `timeit.Timer.repeat` and `timeit.Timer.autorange`.
(Exact algorithms are discussed in method docstrings.) The `timeit`
method is replicated for cases where an adaptive strategy is not
desired.
3) Optional metadata:
When defining a Timer, one can optionally specify `label`, `sub_label`,
`description`, and `env`. (Defined later) These fields are included in
the representation of result object and by the `Compare` class to group
and display results for comparison.
4) Instruction counts
In addition to wall times, Timer can run a statement under Callgrind
and report instructions executed.
Directly analogous to `timeit.Timer` constructor arguments:
`stmt`, `setup`, `timer`, `globals`
PyTorch Timer specific constructor arguments:
`label`, `sub_label`, `description`, `env`, `num_threads`
Args:
stmt: Code snippet to be run in a loop and timed.
setup: Optional setup code. Used to define variables used in `stmt`
timer:
Callable which returns the current time. If PyTorch was built
without CUDA or there is no GPU present, this defaults to
`timeit.default_timer`; otherwise it will synchronize CUDA before
measuring the time.
globals:
A dict which defines the global variables when `stmt` is being
executed. This is the other method for providing variables which
`stmt` needs.
label:
String which summarizes `stmt`. For instance, if `stmt` is
"torch.nn.functional.relu(torch.add(x, 1, out=out))"
one might set label to "ReLU(x + 1)" to improve readability.
sub_label:
Provide supplemental information to disambiguate measurements
with identical stmt or label. For instance, in our example
above sub_label might be "float" or "int", so that it is easy
to differentiate:
"ReLU(x + 1): (float)"
"ReLU(x + 1): (int)"
when printing Measurements or summarizing using `Compare`.
description:
String to distinguish measurements with identical label and
sub_label. The principal use of `description` is to signal to
`Compare` the columns of data. For instance one might set it
based on the input size to create a table of the form: ::
| n=1 | n=4 | ...
------------- ...
ReLU(x + 1): (float) | ... | ... | ...
ReLU(x + 1): (int) | ... | ... | ...
using `Compare`. It is also included when printing a Measurement.
env:
This tag indicates that otherwise identical tasks were run in
different environments, and are therefore not equivilent, for
instance when A/B testing a change to a kernel. `Compare` will
treat Measurements with different `env` specification as distinct
when merging replicate runs.
num_threads:
The size of the PyTorch threadpool when executing `stmt`. Single
threaded performace is important as both a key inference workload
and a good indicator of intrinsic algorithmic efficiency, so the
default is set to one. This is in contrast to the default PyTorch
threadpool size which tries to utilize all cores.
"""
_timer_cls: Type[TimerClass] = timeit.Timer
def __init__(
self,
stmt: str = "pass",
setup: str = "pass",
timer: Callable[[], float] = timer,
globals: Optional[Dict[str, Any]] = None,
label: Optional[str] = None,
sub_label: Optional[str] = None,
description: Optional[str] = None,
env: Optional[str] = None,
num_threads: int = 1,
language: Union[Language, str] = Language.PYTHON,
):
if not isinstance(stmt, str):
raise ValueError("Currently only a `str` stmt is supported.")
# We copy `globals` to prevent mutations from leaking.
# (For instance, `eval` adds the `__builtins__` key)
self._globals = dict(globals or {})
if language in (Language.PYTHON, "py", "python"):
# Include `torch` if not specified as a convenience feature.
self._globals.setdefault("torch", torch)
self._language: Language = Language.PYTHON
elif language in (Language.CPP, "cpp", "c++"):
assert self._timer_cls is timeit.Timer, "_timer_cls has already been swapped."
self._timer_cls = CPPTimer
setup = ("" if setup == "pass" else setup)
self._language = Language.CPP
else:
raise ValueError(f"Invalid language `{language}`.")
# Convenience adjustment so that multi-line code snippets defined in
# functions do not IndentationError (Python) or look odd (C++). The
# leading newline removal is for the initial newline that appears when
# defining block strings. For instance:
# textwrap.dedent("""
# print("This is a stmt")
# """)
# produces '\nprint("This is a stmt")\n'.
#
# Stripping this down to 'print("This is a stmt")' doesn't change
# what gets executed, but it makes __repr__'s nicer.
stmt = textwrap.dedent(stmt)
stmt = (stmt[1:] if stmt and stmt[0] == "\n" else stmt).rstrip()
setup = textwrap.dedent(setup)
setup = (setup[1:] if setup and setup[0] == "\n" else setup).rstrip()
self._timer = self._timer_cls(
stmt=stmt,
setup=setup,
timer=timer,
globals=valgrind_timer_interface.CopyIfCallgrind.unwrap_all(self._globals),
)
self._task_spec = common.TaskSpec(
stmt=stmt,
setup=setup,
label=label,
sub_label=sub_label,
description=description,
env=env,
num_threads=num_threads,
)
def timeit(self, number: int = 1000000) -> common.Measurement:
|
def repeat(self, repeat: int = -1, number: int = -1) -> None:
raise NotImplementedError("See `Timer.blocked_autorange.`")
def autorange(self, callback: Optional[Callable[[int, float], NoReturn]] = None) -> None:
raise NotImplementedError("See `Timer.blocked_autorange.`")
def _threaded_measurement_loop(
self,
number: int,
time_hook: Callable[[], float],
stop_hook: Callable[[List[float]], bool],
min_run_time: float,
max_run_time: Optional[float] = None,
callback: Optional[Callable[[int, float], NoReturn]] = None
) -> List[float]:
total_time = 0.0
can_stop = False
times: List[float] = []
with common.set_torch_threads(self._task_spec.num_threads):
while (total_time < min_run_time) or (not can_stop):
time_spent = time_hook()
times.append(time_spent)
total_time += time_spent
if callback:
callback(number, time_spent)
can_stop = stop_hook(times)
if max_run_time and total_time > max_run_time:
break
return times
def _estimate_block_size(self, min_run_time: float) -> int:
with common.set_torch_threads(self._task_spec.num_threads):
# Estimate the block size needed for measurement to be negligible
# compared to the inner loop. This also serves as a warmup.
overhead = np.median([self._timer.timeit(0) for _ in range(5)])
number = 1
while True:
time_taken = self._timer.timeit(number)
relative_overhead = overhead / time_taken
if relative_overhead <= 1e-4 and time_taken >= min_run_time / 1000:
break
if time_taken > min_run_time:
break
number *= 10
return number
def adaptive_autorange(
self,
threshold: float = 0.1,
*,
min_run_time: float = 0.01,
max_run_time: float = 10.0,
callback: Optional[Callable[[int, float], NoReturn]] = None,
) -> common.Measurement:
number = self._estimate_block_size(min_run_time=0.05)
def time_hook() -> float:
return self._timer.timeit(number)
def stop_hook(times: List[float]) -> bool:
if len(times) > 3:
return common.Measurement(
number_per_run=number,
raw_times=times,
task_spec=self._task_spec
).meets_confidence(threshold=threshold)
return False
times = self._threaded_measurement_loop(
number, time_hook, stop_hook, min_run_time, max_run_time, callback=callback)
return common.Measurement(
number_per_run=number,
raw_times=times,
task_spec=self._task_spec
)
def blocked_autorange(
self,
callback: Optional[Callable[[int, float], NoReturn]] = None,
min_run_time: float = 0.2,
) -> common.Measurement:
"""Measure many replicates while keeping timer overhead to a minimum.
At a high level, blocked_autorange executes the following pseudo-code::
`setup`
total_time = 0
while total_time < min_run_time
start = timer()
for _ in range(block_size):
`stmt`
total_time += (timer() - start)
Note the variable `block_size` in the inner loop. The choice of block
size is important to measurement quality, and must balance two
competing objectives:
1) A small block size results in more replicates and generally
better statistics.
2) A large block size better amortizes the cost of `timer`
invocation, and results in a less biased measurement. This is
important because CUDA syncronization time is non-trivial
(order single to low double digit microseconds) and would
otherwise bias the measurement.
blocked_autorange sets block_size by running a warmup period,
increasing block size until timer overhead is less than 0.1% of
the overall computation. This value is then used for the main
measurement loop.
Returns:
A `Measurement` object that contains measured runtimes and
repetition counts, and can be used to compute statistics.
(mean, median, etc.)
"""
number = self._estimate_block_size(min_run_time)
def time_hook() -> float:
return self._timer.timeit(number)
def stop_hook(times: List[float]) -> bool:
return True
times = self._threaded_measurement_loop(
number, time_hook, stop_hook,
min_run_time=min_run_time,
callback=callback)
return common.Measurement(
number_per_run=number,
raw_times=times,
task_spec=self._task_spec
)
def collect_callgrind(
self,
number: int = 100,
collect_baseline: bool = True
) -> valgrind_timer_interface.CallgrindStats:
"""Collect instruction counts using Callgrind.
Unlike wall times, instruction counts are deterministic
(modulo non-determinism in the program itself and small amounts of
jitter from the Python interpreter.) This makes them ideal for detailed
performance analysis. This method runs `stmt` in a separate process
so that Valgrind can instrument the program. Performance is severely
degraded due to the instrumentation, howevever this is ameliorated by
the fact that a small number of iterations is generally sufficient to
obtain good measurements.
In order to to use this method `valgrind`, `callgrind_control`, and
`callgrind_annotate` must be installed.
Because there is a process boundary between the caller (this process)
and the `stmt` execution, `globals` cannot contain arbitrary in-memory
data structures. (Unlike timing methods) Instead, globals are
restricted to builtins, `nn.Modules`'s, and TorchScripted functions/modules
to reduce the surprise factor from serialization and subsequent
deserialization. The `GlobalsBridge` class provides more detail on this
subject. Take particular care with nn.Modules: they rely on pickle and
you may need to add an import to `setup` for them to transfer properly.
By default, a profile for an empty statement will be collected and
cached to indicate how many instructions are from the Python loop which
drives `stmt`.
Returns:
A `CallgrindStats` object which provides instruction counts and
some basic facilities for analyzing and manipulating results.
"""
if not isinstance(self._task_spec.stmt, str):
raise ValueError("`collect_callgrind` currently only supports string `stmt`")
# Check that the statement is valid. It doesn't guarantee success, but it's much
# simpler and quicker to raise an exception for a faulty `stmt` or `setup` in
# the parent process rather than the valgrind subprocess.
self._timer.timeit(1)
is_python = (self._language == Language.PYTHON)
assert is_python or not self._globals
return valgrind_timer_interface.wrapper_singleton().collect_callgrind(
task_spec=self._task_spec,
globals=self._globals,
number=number,
collect_baseline=collect_baseline and is_python,
is_python=is_python)
| """Mirrors the semantics of timeit.Timer.timeit().
Execute the main statement (`stmt`) `number` times.
https://docs.python.org/3/library/timeit.html#timeit.Timer.timeit
"""
with common.set_torch_threads(self._task_spec.num_threads):
# Warmup
self._timer.timeit(number=max(int(number // 100), 1))
return common.Measurement(
number_per_run=number,
raw_times=[self._timer.timeit(number=number)],
task_spec=self._task_spec
) |
uvll.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
* Low-level bindings to the libuv library.
*
* This module contains a set of direct, 'bare-metal' wrappers around
* the libuv C-API.
*
* We're not bothering yet to redefine uv's structs as Rust structs
* because they are quite large and change often between versions.
* The maintenance burden is just too high. Instead we use the uv's
* `uv_handle_size` and `uv_req_size` to find the correct size of the
* structs and allocate them on the heap. This can be revisited later.
*
* There are also a collection of helper functions to ease interacting
* with the low-level API.
*
* As new functionality, existent in uv.h, is added to the rust stdlib,
* the mappings should be added in this module.
*/
#[allow(non_camel_case_types)]; // C types
use c_str::ToCStr;
use libc::{size_t, c_int, c_uint, c_void, c_char, uintptr_t};
use libc::ssize_t;
use libc::{malloc, free};
use libc;
use prelude::*;
use ptr;
use vec;
pub use self::errors::*;
pub static OK: c_int = 0;
pub static EOF: c_int = -4095;
pub static UNKNOWN: c_int = -4094;
// uv-errno.h redefines error codes for windows, but not for unix...
#[cfg(windows)]
pub mod errors {
use libc::c_int;
pub static EACCES: c_int = -4093;
pub static ECONNREFUSED: c_int = -4079;
pub static ECONNRESET: c_int = -4078;
pub static EPIPE: c_int = -4048;
}
#[cfg(not(windows))]
pub mod errors {
use libc;
use libc::c_int;
pub static EACCES: c_int = -libc::EACCES;
pub static ECONNREFUSED: c_int = -libc::ECONNREFUSED;
pub static ECONNRESET: c_int = -libc::ECONNRESET;
pub static EPIPE: c_int = -libc::EPIPE;
}
pub static PROCESS_SETUID: c_int = 1 << 0;
pub static PROCESS_SETGID: c_int = 1 << 1;
pub static PROCESS_WINDOWS_VERBATIM_ARGUMENTS: c_int = 1 << 2;
pub static PROCESS_DETACHED: c_int = 1 << 3;
pub static PROCESS_WINDOWS_HIDE: c_int = 1 << 4;
pub static STDIO_IGNORE: c_int = 0x00;
pub static STDIO_CREATE_PIPE: c_int = 0x01;
pub static STDIO_INHERIT_FD: c_int = 0x02;
pub static STDIO_INHERIT_STREAM: c_int = 0x04;
pub static STDIO_READABLE_PIPE: c_int = 0x10;
pub static STDIO_WRITABLE_PIPE: c_int = 0x20;
// see libuv/include/uv-unix.h
#[cfg(unix)]
pub struct uv_buf_t {
base: *u8,
len: libc::size_t,
}
// see libuv/include/uv-win.h
#[cfg(windows)]
pub struct uv_buf_t {
len: u32,
base: *u8,
}
pub struct uv_process_options_t {
exit_cb: uv_exit_cb,
file: *libc::c_char,
args: **libc::c_char,
env: **libc::c_char,
cwd: *libc::c_char,
flags: libc::c_uint,
stdio_count: libc::c_int,
stdio: *uv_stdio_container_t,
uid: uv_uid_t,
gid: uv_gid_t,
}
// These fields are private because they must be interfaced with through the
// functions below.
pub struct uv_stdio_container_t {
priv flags: libc::c_int,
priv stream: *uv_stream_t,
}
pub type uv_handle_t = c_void;
pub type uv_loop_t = c_void;
pub type uv_idle_t = c_void;
pub type uv_tcp_t = c_void;
pub type uv_udp_t = c_void;
pub type uv_connect_t = c_void;
pub type uv_connection_t = c_void;
pub type uv_write_t = c_void;
pub type uv_async_t = c_void;
pub type uv_timer_t = c_void;
pub type uv_stream_t = c_void;
pub type uv_fs_t = c_void;
pub type uv_udp_send_t = c_void;
pub type uv_getaddrinfo_t = c_void;
pub type uv_process_t = c_void;
pub type uv_pipe_t = c_void;
pub struct uv_timespec_t {
tv_sec: libc::c_long,
tv_nsec: libc::c_long
}
pub struct uv_stat_t {
st_dev: libc::uint64_t,
st_mode: libc::uint64_t,
st_nlink: libc::uint64_t,
st_uid: libc::uint64_t,
st_gid: libc::uint64_t,
st_rdev: libc::uint64_t,
st_ino: libc::uint64_t,
st_size: libc::uint64_t,
st_blksize: libc::uint64_t,
st_blocks: libc::uint64_t,
st_flags: libc::uint64_t,
st_gen: libc::uint64_t,
st_atim: uv_timespec_t,
st_mtim: uv_timespec_t,
st_ctim: uv_timespec_t,
st_birthtim: uv_timespec_t
}
impl uv_stat_t {
pub fn new() -> uv_stat_t {
uv_stat_t {
st_dev: 0,
st_mode: 0,
st_nlink: 0,
st_uid: 0,
st_gid: 0,
st_rdev: 0,
st_ino: 0,
st_size: 0,
st_blksize: 0,
st_blocks: 0,
st_flags: 0,
st_gen: 0,
st_atim: uv_timespec_t { tv_sec: 0, tv_nsec: 0 },
st_mtim: uv_timespec_t { tv_sec: 0, tv_nsec: 0 },
st_ctim: uv_timespec_t { tv_sec: 0, tv_nsec: 0 },
st_birthtim: uv_timespec_t { tv_sec: 0, tv_nsec: 0 }
}
}
pub fn is_file(&self) -> bool {
((self.st_mode) & libc::S_IFMT as libc::uint64_t) == libc::S_IFREG as libc::uint64_t
}
pub fn is_dir(&self) -> bool {
((self.st_mode) & libc::S_IFMT as libc::uint64_t) == libc::S_IFDIR as libc::uint64_t
}
}
pub type uv_idle_cb = extern "C" fn(handle: *uv_idle_t, | status: c_int);
pub type uv_alloc_cb = extern "C" fn(stream: *uv_stream_t,
suggested_size: size_t) -> uv_buf_t;
pub type uv_read_cb = extern "C" fn(stream: *uv_stream_t,
nread: ssize_t,
buf: uv_buf_t);
pub type uv_udp_send_cb = extern "C" fn(req: *uv_udp_send_t,
status: c_int);
pub type uv_udp_recv_cb = extern "C" fn(handle: *uv_udp_t,
nread: ssize_t,
buf: uv_buf_t,
addr: *sockaddr,
flags: c_uint);
pub type uv_close_cb = extern "C" fn(handle: *uv_handle_t);
pub type uv_walk_cb = extern "C" fn(handle: *uv_handle_t,
arg: *c_void);
pub type uv_async_cb = extern "C" fn(handle: *uv_async_t,
status: c_int);
pub type uv_connect_cb = extern "C" fn(handle: *uv_connect_t,
status: c_int);
pub type uv_connection_cb = extern "C" fn(handle: *uv_connection_t,
status: c_int);
pub type uv_timer_cb = extern "C" fn(handle: *uv_timer_t,
status: c_int);
pub type uv_write_cb = extern "C" fn(handle: *uv_write_t,
status: c_int);
pub type uv_getaddrinfo_cb = extern "C" fn(req: *uv_getaddrinfo_t,
status: c_int,
res: *addrinfo);
pub type uv_exit_cb = extern "C" fn(handle: *uv_process_t,
exit_status: c_int,
term_signal: c_int);
pub type sockaddr = c_void;
pub type sockaddr_in = c_void;
pub type sockaddr_in6 = c_void;
pub type sockaddr_storage = c_void;
#[cfg(unix)]
pub type socklen_t = c_int;
// XXX: This is a standard C type. Could probably be defined in libc
#[cfg(target_os = "android")]
#[cfg(target_os = "linux")]
pub struct addrinfo {
ai_flags: c_int,
ai_family: c_int,
ai_socktype: c_int,
ai_protocol: c_int,
ai_addrlen: socklen_t,
ai_addr: *sockaddr,
ai_canonname: *char,
ai_next: *addrinfo
}
#[cfg(target_os = "macos")]
#[cfg(target_os = "freebsd")]
pub struct addrinfo {
ai_flags: c_int,
ai_family: c_int,
ai_socktype: c_int,
ai_protocol: c_int,
ai_addrlen: socklen_t,
ai_canonname: *char,
ai_addr: *sockaddr,
ai_next: *addrinfo
}
#[cfg(windows)]
pub struct addrinfo {
ai_flags: c_int,
ai_family: c_int,
ai_socktype: c_int,
ai_protocol: c_int,
ai_addrlen: size_t,
ai_canonname: *char,
ai_addr: *sockaddr,
ai_next: *addrinfo
}
#[cfg(unix)] pub type uv_uid_t = libc::types::os::arch::posix88::uid_t;
#[cfg(unix)] pub type uv_gid_t = libc::types::os::arch::posix88::gid_t;
#[cfg(windows)] pub type uv_uid_t = libc::c_uchar;
#[cfg(windows)] pub type uv_gid_t = libc::c_uchar;
#[deriving(Eq)]
pub enum uv_handle_type {
UV_UNKNOWN_HANDLE,
UV_ASYNC,
UV_CHECK,
UV_FS_EVENT,
UV_FS_POLL,
UV_HANDLE,
UV_IDLE,
UV_NAMED_PIPE,
UV_POLL,
UV_PREPARE,
UV_PROCESS,
UV_STREAM,
UV_TCP,
UV_TIMER,
UV_TTY,
UV_UDP,
UV_SIGNAL,
UV_FILE,
UV_HANDLE_TYPE_MAX
}
#[cfg(unix)]
#[deriving(Eq)]
pub enum uv_req_type {
UV_UNKNOWN_REQ,
UV_REQ,
UV_CONNECT,
UV_WRITE,
UV_SHUTDOWN,
UV_UDP_SEND,
UV_FS,
UV_WORK,
UV_GETADDRINFO,
UV_REQ_TYPE_MAX
}
// uv_req_type may have additional fields defined by UV_REQ_TYPE_PRIVATE.
// See UV_REQ_TYPE_PRIVATE at libuv/include/uv-win.h
#[cfg(windows)]
#[deriving(Eq)]
pub enum uv_req_type {
UV_UNKNOWN_REQ,
UV_REQ,
UV_CONNECT,
UV_WRITE,
UV_SHUTDOWN,
UV_UDP_SEND,
UV_FS,
UV_WORK,
UV_GETADDRINFO,
UV_ACCEPT,
UV_FS_EVENT_REQ,
UV_POLL_REQ,
UV_PROCESS_EXIT,
UV_READ,
UV_UDP_RECV,
UV_WAKEUP,
UV_SIGNAL_REQ,
UV_REQ_TYPE_MAX
}
#[deriving(Eq)]
pub enum uv_membership {
UV_LEAVE_GROUP,
UV_JOIN_GROUP
}
pub unsafe fn malloc_handle(handle: uv_handle_type) -> *c_void {
#[fixed_stack_segment]; #[inline(never)];
assert!(handle != UV_UNKNOWN_HANDLE && handle != UV_HANDLE_TYPE_MAX);
let size = rust_uv_handle_size(handle as uint);
let p = malloc(size);
assert!(p.is_not_null());
return p;
}
pub unsafe fn free_handle(v: *c_void) {
#[fixed_stack_segment]; #[inline(never)];
free(v)
}
pub unsafe fn malloc_req(req: uv_req_type) -> *c_void {
#[fixed_stack_segment]; #[inline(never)];
assert!(req != UV_UNKNOWN_REQ && req != UV_REQ_TYPE_MAX);
let size = rust_uv_req_size(req as uint);
let p = malloc(size);
assert!(p.is_not_null());
return p;
}
pub unsafe fn free_req(v: *c_void) {
#[fixed_stack_segment]; #[inline(never)];
free(v)
}
#[test]
fn handle_sanity_check() {
#[fixed_stack_segment]; #[inline(never)];
unsafe {
assert_eq!(UV_HANDLE_TYPE_MAX as uint, rust_uv_handle_type_max());
}
}
#[test]
fn request_sanity_check() {
#[fixed_stack_segment]; #[inline(never)];
unsafe {
assert_eq!(UV_REQ_TYPE_MAX as uint, rust_uv_req_type_max());
}
}
// XXX Event loops ignore SIGPIPE by default.
pub unsafe fn loop_new() -> *c_void {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_loop_new();
}
pub unsafe fn loop_delete(loop_handle: *c_void) {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_loop_delete(loop_handle);
}
pub unsafe fn run(loop_handle: *c_void) {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_run(loop_handle);
}
pub unsafe fn close<T>(handle: *T, cb: uv_close_cb) {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_close(handle as *c_void, cb);
}
pub unsafe fn walk(loop_handle: *c_void, cb: uv_walk_cb, arg: *c_void) {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_walk(loop_handle, cb, arg);
}
pub unsafe fn idle_new() -> *uv_idle_t {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_idle_new()
}
pub unsafe fn idle_delete(handle: *uv_idle_t) {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_idle_delete(handle)
}
pub unsafe fn idle_init(loop_handle: *uv_loop_t, handle: *uv_idle_t) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_idle_init(loop_handle, handle)
}
pub unsafe fn idle_start(handle: *uv_idle_t, cb: uv_idle_cb) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_idle_start(handle, cb)
}
pub unsafe fn idle_stop(handle: *uv_idle_t) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_idle_stop(handle)
}
pub unsafe fn udp_init(loop_handle: *uv_loop_t, handle: *uv_udp_t) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_udp_init(loop_handle, handle);
}
pub unsafe fn udp_bind(server: *uv_udp_t, addr: *sockaddr_in, flags: c_uint) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_udp_bind(server, addr, flags);
}
pub unsafe fn udp_bind6(server: *uv_udp_t, addr: *sockaddr_in6, flags: c_uint) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_udp_bind6(server, addr, flags);
}
pub unsafe fn udp_send<T>(req: *uv_udp_send_t, handle: *T, buf_in: &[uv_buf_t],
addr: *sockaddr_in, cb: uv_udp_send_cb) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
let buf_ptr = vec::raw::to_ptr(buf_in);
let buf_cnt = buf_in.len() as i32;
return rust_uv_udp_send(req, handle as *c_void, buf_ptr, buf_cnt, addr, cb);
}
pub unsafe fn udp_send6<T>(req: *uv_udp_send_t, handle: *T, buf_in: &[uv_buf_t],
addr: *sockaddr_in6, cb: uv_udp_send_cb) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
let buf_ptr = vec::raw::to_ptr(buf_in);
let buf_cnt = buf_in.len() as i32;
return rust_uv_udp_send6(req, handle as *c_void, buf_ptr, buf_cnt, addr, cb);
}
pub unsafe fn udp_recv_start(server: *uv_udp_t, on_alloc: uv_alloc_cb,
on_recv: uv_udp_recv_cb) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_udp_recv_start(server, on_alloc, on_recv);
}
pub unsafe fn udp_recv_stop(server: *uv_udp_t) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_udp_recv_stop(server);
}
pub unsafe fn get_udp_handle_from_send_req(send_req: *uv_udp_send_t) -> *uv_udp_t {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_get_udp_handle_from_send_req(send_req);
}
pub unsafe fn udp_getsockname(handle: *uv_udp_t, name: *sockaddr_storage) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_udp_getsockname(handle, name);
}
pub unsafe fn udp_set_membership(handle: *uv_udp_t, multicast_addr: *c_char,
interface_addr: *c_char, membership: uv_membership) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_udp_set_membership(handle, multicast_addr, interface_addr, membership as c_int);
}
pub unsafe fn udp_set_multicast_loop(handle: *uv_udp_t, on: c_int) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_udp_set_multicast_loop(handle, on);
}
pub unsafe fn udp_set_multicast_ttl(handle: *uv_udp_t, ttl: c_int) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_udp_set_multicast_ttl(handle, ttl);
}
pub unsafe fn udp_set_ttl(handle: *uv_udp_t, ttl: c_int) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_udp_set_ttl(handle, ttl);
}
pub unsafe fn udp_set_broadcast(handle: *uv_udp_t, on: c_int) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_udp_set_broadcast(handle, on);
}
pub unsafe fn tcp_init(loop_handle: *c_void, handle: *uv_tcp_t) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_tcp_init(loop_handle, handle);
}
pub unsafe fn tcp_connect(connect_ptr: *uv_connect_t, tcp_handle_ptr: *uv_tcp_t,
addr_ptr: *sockaddr_in, after_connect_cb: uv_connect_cb) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_tcp_connect(connect_ptr, tcp_handle_ptr, after_connect_cb, addr_ptr);
}
pub unsafe fn tcp_connect6(connect_ptr: *uv_connect_t, tcp_handle_ptr: *uv_tcp_t,
addr_ptr: *sockaddr_in6, after_connect_cb: uv_connect_cb) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_tcp_connect6(connect_ptr, tcp_handle_ptr, after_connect_cb, addr_ptr);
}
pub unsafe fn tcp_bind(tcp_server_ptr: *uv_tcp_t, addr_ptr: *sockaddr_in) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_tcp_bind(tcp_server_ptr, addr_ptr);
}
pub unsafe fn tcp_bind6(tcp_server_ptr: *uv_tcp_t, addr_ptr: *sockaddr_in6) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_tcp_bind6(tcp_server_ptr, addr_ptr);
}
pub unsafe fn tcp_getpeername(tcp_handle_ptr: *uv_tcp_t, name: *sockaddr_storage) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_tcp_getpeername(tcp_handle_ptr, name);
}
pub unsafe fn tcp_getsockname(handle: *uv_tcp_t, name: *sockaddr_storage) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_tcp_getsockname(handle, name);
}
pub unsafe fn tcp_nodelay(handle: *uv_tcp_t, enable: c_int) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_tcp_nodelay(handle, enable);
}
pub unsafe fn tcp_keepalive(handle: *uv_tcp_t, enable: c_int, delay: c_uint) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_tcp_keepalive(handle, enable, delay);
}
pub unsafe fn tcp_simultaneous_accepts(handle: *uv_tcp_t, enable: c_int) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_tcp_simultaneous_accepts(handle, enable);
}
pub unsafe fn listen<T>(stream: *T, backlog: c_int,
cb: uv_connection_cb) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_listen(stream as *c_void, backlog, cb);
}
pub unsafe fn accept(server: *c_void, client: *c_void) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_accept(server as *c_void, client as *c_void);
}
pub unsafe fn write<T>(req: *uv_write_t,
stream: *T,
buf_in: &[uv_buf_t],
cb: uv_write_cb) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
let buf_ptr = vec::raw::to_ptr(buf_in);
let buf_cnt = buf_in.len() as i32;
return rust_uv_write(req as *c_void, stream as *c_void, buf_ptr, buf_cnt, cb);
}
pub unsafe fn read_start(stream: *uv_stream_t,
on_alloc: uv_alloc_cb,
on_read: uv_read_cb) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_read_start(stream as *c_void, on_alloc, on_read);
}
pub unsafe fn read_stop(stream: *uv_stream_t) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_read_stop(stream as *c_void);
}
pub unsafe fn strerror(err: c_int) -> *c_char {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_strerror(err);
}
pub unsafe fn err_name(err: c_int) -> *c_char {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_err_name(err);
}
pub unsafe fn async_init(loop_handle: *c_void,
async_handle: *uv_async_t,
cb: uv_async_cb) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_async_init(loop_handle, async_handle, cb);
}
pub unsafe fn async_send(async_handle: *uv_async_t) {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_async_send(async_handle);
}
pub unsafe fn buf_init(input: *u8, len: uint) -> uv_buf_t {
#[fixed_stack_segment]; #[inline(never)];
let out_buf = uv_buf_t { base: ptr::null(), len: 0 as size_t };
let out_buf_ptr = ptr::to_unsafe_ptr(&out_buf);
rust_uv_buf_init(out_buf_ptr, input, len as size_t);
return out_buf;
}
pub unsafe fn timer_init(loop_ptr: *c_void, timer_ptr: *uv_timer_t) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_timer_init(loop_ptr, timer_ptr);
}
pub unsafe fn timer_start(timer_ptr: *uv_timer_t,
cb: uv_timer_cb, timeout: u64,
repeat: u64) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_timer_start(timer_ptr, cb, timeout, repeat);
}
pub unsafe fn timer_stop(timer_ptr: *uv_timer_t) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_timer_stop(timer_ptr);
}
pub unsafe fn is_ip4_addr(addr: *sockaddr) -> bool {
#[fixed_stack_segment]; #[inline(never)];
match rust_uv_is_ipv4_sockaddr(addr) { 0 => false, _ => true }
}
pub unsafe fn is_ip6_addr(addr: *sockaddr) -> bool {
#[fixed_stack_segment]; #[inline(never)];
match rust_uv_is_ipv6_sockaddr(addr) { 0 => false, _ => true }
}
pub unsafe fn malloc_ip4_addr(ip: &str, port: int) -> *sockaddr_in {
#[fixed_stack_segment]; #[inline(never)];
do ip.with_c_str |ip_buf| {
rust_uv_ip4_addrp(ip_buf as *u8, port as libc::c_int)
}
}
pub unsafe fn malloc_ip6_addr(ip: &str, port: int) -> *sockaddr_in6 {
#[fixed_stack_segment]; #[inline(never)];
do ip.with_c_str |ip_buf| {
rust_uv_ip6_addrp(ip_buf as *u8, port as libc::c_int)
}
}
pub unsafe fn malloc_sockaddr_storage() -> *sockaddr_storage {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_malloc_sockaddr_storage()
}
pub unsafe fn free_sockaddr_storage(ss: *sockaddr_storage) {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_free_sockaddr_storage(ss);
}
pub unsafe fn free_ip4_addr(addr: *sockaddr_in) {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_free_ip4_addr(addr);
}
pub unsafe fn free_ip6_addr(addr: *sockaddr_in6) {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_free_ip6_addr(addr);
}
pub unsafe fn ip4_name(addr: *sockaddr_in, dst: *u8, size: size_t) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_ip4_name(addr, dst, size);
}
pub unsafe fn ip6_name(addr: *sockaddr_in6, dst: *u8, size: size_t) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_ip6_name(addr, dst, size);
}
pub unsafe fn ip4_port(addr: *sockaddr_in) -> c_uint {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_ip4_port(addr);
}
pub unsafe fn ip6_port(addr: *sockaddr_in6) -> c_uint {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_ip6_port(addr);
}
pub unsafe fn fs_open(loop_ptr: *uv_loop_t, req: *uv_fs_t, path: *c_char, flags: int, mode: int,
cb: *u8) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_fs_open(loop_ptr, req, path, flags as c_int, mode as c_int, cb)
}
pub unsafe fn fs_unlink(loop_ptr: *uv_loop_t, req: *uv_fs_t, path: *c_char,
cb: *u8) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_fs_unlink(loop_ptr, req, path, cb)
}
pub unsafe fn fs_write(loop_ptr: *uv_loop_t, req: *uv_fs_t, fd: c_int, buf: *c_void,
len: uint, offset: i64, cb: *u8) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_fs_write(loop_ptr, req, fd, buf, len as c_uint, offset, cb)
}
pub unsafe fn fs_read(loop_ptr: *uv_loop_t, req: *uv_fs_t, fd: c_int, buf: *c_void,
len: uint, offset: i64, cb: *u8) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_fs_read(loop_ptr, req, fd, buf, len as c_uint, offset, cb)
}
pub unsafe fn fs_close(loop_ptr: *uv_loop_t, req: *uv_fs_t, fd: c_int,
cb: *u8) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_fs_close(loop_ptr, req, fd, cb)
}
pub unsafe fn fs_stat(loop_ptr: *uv_loop_t, req: *uv_fs_t, path: *c_char, cb: *u8) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_fs_stat(loop_ptr, req, path, cb)
}
pub unsafe fn fs_fstat(loop_ptr: *uv_loop_t, req: *uv_fs_t, fd: c_int, cb: *u8) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_fs_fstat(loop_ptr, req, fd, cb)
}
pub unsafe fn fs_mkdir(loop_ptr: *uv_loop_t, req: *uv_fs_t, path: *c_char, mode: int,
cb: *u8) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_fs_mkdir(loop_ptr, req, path, mode as c_int, cb)
}
pub unsafe fn fs_rmdir(loop_ptr: *uv_loop_t, req: *uv_fs_t, path: *c_char,
cb: *u8) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_fs_rmdir(loop_ptr, req, path, cb)
}
pub unsafe fn fs_readdir(loop_ptr: *uv_loop_t, req: *uv_fs_t, path: *c_char,
flags: c_int, cb: *u8) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_fs_readdir(loop_ptr, req, path, flags, cb)
}
pub unsafe fn populate_stat(req_in: *uv_fs_t, stat_out: *uv_stat_t) {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_populate_uv_stat(req_in, stat_out)
}
pub unsafe fn fs_req_cleanup(req: *uv_fs_t) {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_fs_req_cleanup(req);
}
pub unsafe fn spawn(loop_ptr: *c_void, result: *uv_process_t,
options: uv_process_options_t) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_spawn(loop_ptr, result, options);
}
pub unsafe fn process_kill(p: *uv_process_t, signum: c_int) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_process_kill(p, signum);
}
pub unsafe fn process_pid(p: *uv_process_t) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_process_pid(p);
}
pub unsafe fn set_stdio_container_flags(c: *uv_stdio_container_t,
flags: libc::c_int) {
#[fixed_stack_segment]; #[inline(never)];
rust_set_stdio_container_flags(c, flags);
}
pub unsafe fn set_stdio_container_fd(c: *uv_stdio_container_t,
fd: libc::c_int) {
#[fixed_stack_segment]; #[inline(never)];
rust_set_stdio_container_fd(c, fd);
}
pub unsafe fn set_stdio_container_stream(c: *uv_stdio_container_t,
stream: *uv_stream_t) {
#[fixed_stack_segment]; #[inline(never)];
rust_set_stdio_container_stream(c, stream);
}
pub unsafe fn pipe_init(loop_ptr: *c_void, p: *uv_pipe_t, ipc: c_int) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_pipe_init(loop_ptr, p, ipc)
}
// data access helpers
pub unsafe fn get_result_from_fs_req(req: *uv_fs_t) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_get_result_from_fs_req(req)
}
pub unsafe fn get_ptr_from_fs_req(req: *uv_fs_t) -> *libc::c_void {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_get_ptr_from_fs_req(req)
}
pub unsafe fn get_loop_from_fs_req(req: *uv_fs_t) -> *uv_loop_t {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_get_loop_from_fs_req(req)
}
pub unsafe fn get_loop_from_getaddrinfo_req(req: *uv_getaddrinfo_t) -> *uv_loop_t {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_get_loop_from_getaddrinfo_req(req)
}
pub unsafe fn get_loop_for_uv_handle<T>(handle: *T) -> *c_void {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_get_loop_for_uv_handle(handle as *c_void);
}
pub unsafe fn get_stream_handle_from_connect_req(connect: *uv_connect_t) -> *uv_stream_t {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_get_stream_handle_from_connect_req(connect);
}
pub unsafe fn get_stream_handle_from_write_req(write_req: *uv_write_t) -> *uv_stream_t {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_get_stream_handle_from_write_req(write_req);
}
pub unsafe fn get_data_for_uv_loop(loop_ptr: *c_void) -> *c_void {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_get_data_for_uv_loop(loop_ptr)
}
pub unsafe fn set_data_for_uv_loop(loop_ptr: *c_void, data: *c_void) {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_set_data_for_uv_loop(loop_ptr, data);
}
pub unsafe fn get_data_for_uv_handle<T>(handle: *T) -> *c_void {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_get_data_for_uv_handle(handle as *c_void);
}
pub unsafe fn set_data_for_uv_handle<T, U>(handle: *T, data: *U) {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_set_data_for_uv_handle(handle as *c_void, data as *c_void);
}
pub unsafe fn get_data_for_req<T>(req: *T) -> *c_void {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_get_data_for_req(req as *c_void);
}
pub unsafe fn set_data_for_req<T, U>(req: *T, data: *U) {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_set_data_for_req(req as *c_void, data as *c_void);
}
pub unsafe fn get_base_from_buf(buf: uv_buf_t) -> *u8 {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_get_base_from_buf(buf);
}
pub unsafe fn get_len_from_buf(buf: uv_buf_t) -> size_t {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_get_len_from_buf(buf);
}
pub unsafe fn getaddrinfo(loop_: *uv_loop_t, req: *uv_getaddrinfo_t,
getaddrinfo_cb: uv_getaddrinfo_cb,
node: *c_char, service: *c_char,
hints: *addrinfo) -> c_int {
#[fixed_stack_segment]; #[inline(never)];
return rust_uv_getaddrinfo(loop_, req, getaddrinfo_cb, node, service, hints);
}
pub unsafe fn freeaddrinfo(ai: *addrinfo) {
#[fixed_stack_segment]; #[inline(never)];
rust_uv_freeaddrinfo(ai);
}
pub struct uv_err_data {
err_name: ~str,
err_msg: ~str,
}
extern {
fn rust_uv_handle_size(type_: uintptr_t) -> size_t;
fn rust_uv_req_size(type_: uintptr_t) -> size_t;
fn rust_uv_handle_type_max() -> uintptr_t;
fn rust_uv_req_type_max() -> uintptr_t;
// libuv public API
fn rust_uv_loop_new() -> *c_void;
fn rust_uv_loop_delete(lp: *c_void);
fn rust_uv_run(loop_handle: *c_void);
fn rust_uv_close(handle: *c_void, cb: uv_close_cb);
fn rust_uv_walk(loop_handle: *c_void, cb: uv_walk_cb, arg: *c_void);
fn rust_uv_idle_new() -> *uv_idle_t;
fn rust_uv_idle_delete(handle: *uv_idle_t);
fn rust_uv_idle_init(loop_handle: *uv_loop_t, handle: *uv_idle_t) -> c_int;
fn rust_uv_idle_start(handle: *uv_idle_t, cb: uv_idle_cb) -> c_int;
fn rust_uv_idle_stop(handle: *uv_idle_t) -> c_int;
fn rust_uv_async_send(handle: *uv_async_t);
fn rust_uv_async_init(loop_handle: *c_void,
async_handle: *uv_async_t,
cb: uv_async_cb) -> c_int;
fn rust_uv_tcp_init(loop_handle: *c_void, handle_ptr: *uv_tcp_t) -> c_int;
fn rust_uv_buf_init(out_buf: *uv_buf_t, base: *u8, len: size_t);
fn rust_uv_strerror(err: c_int) -> *c_char;
fn rust_uv_err_name(err: c_int) -> *c_char;
fn rust_uv_ip4_addrp(ip: *u8, port: c_int) -> *sockaddr_in;
fn rust_uv_ip6_addrp(ip: *u8, port: c_int) -> *sockaddr_in6;
fn rust_uv_free_ip4_addr(addr: *sockaddr_in);
fn rust_uv_free_ip6_addr(addr: *sockaddr_in6);
fn rust_uv_ip4_name(src: *sockaddr_in, dst: *u8, size: size_t) -> c_int;
fn rust_uv_ip6_name(src: *sockaddr_in6, dst: *u8, size: size_t) -> c_int;
fn rust_uv_ip4_port(src: *sockaddr_in) -> c_uint;
fn rust_uv_ip6_port(src: *sockaddr_in6) -> c_uint;
fn rust_uv_tcp_connect(req: *uv_connect_t, handle: *uv_tcp_t,
cb: uv_connect_cb,
addr: *sockaddr_in) -> c_int;
fn rust_uv_tcp_bind(tcp_server: *uv_tcp_t, addr: *sockaddr_in) -> c_int;
fn rust_uv_tcp_connect6(req: *uv_connect_t, handle: *uv_tcp_t,
cb: uv_connect_cb,
addr: *sockaddr_in6) -> c_int;
fn rust_uv_tcp_bind6(tcp_server: *uv_tcp_t, addr: *sockaddr_in6) -> c_int;
fn rust_uv_tcp_getpeername(tcp_handle_ptr: *uv_tcp_t, name: *sockaddr_storage) -> c_int;
fn rust_uv_tcp_getsockname(handle: *uv_tcp_t, name: *sockaddr_storage) -> c_int;
fn rust_uv_tcp_nodelay(handle: *uv_tcp_t, enable: c_int) -> c_int;
fn rust_uv_tcp_keepalive(handle: *uv_tcp_t, enable: c_int, delay: c_uint) -> c_int;
fn rust_uv_tcp_simultaneous_accepts(handle: *uv_tcp_t, enable: c_int) -> c_int;
fn rust_uv_udp_init(loop_handle: *uv_loop_t, handle_ptr: *uv_udp_t) -> c_int;
fn rust_uv_udp_bind(server: *uv_udp_t, addr: *sockaddr_in, flags: c_uint) -> c_int;
fn rust_uv_udp_bind6(server: *uv_udp_t, addr: *sockaddr_in6, flags: c_uint) -> c_int;
fn rust_uv_udp_send(req: *uv_udp_send_t, handle: *uv_udp_t, buf_in: *uv_buf_t,
buf_cnt: c_int, addr: *sockaddr_in, cb: uv_udp_send_cb) -> c_int;
fn rust_uv_udp_send6(req: *uv_udp_send_t, handle: *uv_udp_t, buf_in: *uv_buf_t,
buf_cnt: c_int, addr: *sockaddr_in6, cb: uv_udp_send_cb) -> c_int;
fn rust_uv_udp_recv_start(server: *uv_udp_t,
on_alloc: uv_alloc_cb,
on_recv: uv_udp_recv_cb) -> c_int;
fn rust_uv_udp_recv_stop(server: *uv_udp_t) -> c_int;
fn rust_uv_get_udp_handle_from_send_req(req: *uv_udp_send_t) -> *uv_udp_t;
fn rust_uv_udp_getsockname(handle: *uv_udp_t, name: *sockaddr_storage) -> c_int;
fn rust_uv_udp_set_membership(handle: *uv_udp_t, multicast_addr: *c_char,
interface_addr: *c_char, membership: c_int) -> c_int;
fn rust_uv_udp_set_multicast_loop(handle: *uv_udp_t, on: c_int) -> c_int;
fn rust_uv_udp_set_multicast_ttl(handle: *uv_udp_t, ttl: c_int) -> c_int;
fn rust_uv_udp_set_ttl(handle: *uv_udp_t, ttl: c_int) -> c_int;
fn rust_uv_udp_set_broadcast(handle: *uv_udp_t, on: c_int) -> c_int;
fn rust_uv_is_ipv4_sockaddr(addr: *sockaddr) -> c_int;
fn rust_uv_is_ipv6_sockaddr(addr: *sockaddr) -> c_int;
fn rust_uv_malloc_sockaddr_storage() -> *sockaddr_storage;
fn rust_uv_free_sockaddr_storage(ss: *sockaddr_storage);
fn rust_uv_listen(stream: *c_void, backlog: c_int,
cb: uv_connection_cb) -> c_int;
fn rust_uv_accept(server: *c_void, client: *c_void) -> c_int;
fn rust_uv_write(req: *c_void, stream: *c_void, buf_in: *uv_buf_t, buf_cnt: c_int,
cb: uv_write_cb) -> c_int;
fn rust_uv_read_start(stream: *c_void,
on_alloc: uv_alloc_cb,
on_read: uv_read_cb) -> c_int;
fn rust_uv_read_stop(stream: *c_void) -> c_int;
fn rust_uv_timer_init(loop_handle: *c_void, timer_handle: *uv_timer_t) -> c_int;
fn rust_uv_timer_start(timer_handle: *uv_timer_t, cb: uv_timer_cb, timeout: libc::uint64_t,
repeat: libc::uint64_t) -> c_int;
fn rust_uv_timer_stop(handle: *uv_timer_t) -> c_int;
fn rust_uv_fs_open(loop_ptr: *c_void, req: *uv_fs_t, path: *c_char,
flags: c_int, mode: c_int, cb: *u8) -> c_int;
fn rust_uv_fs_unlink(loop_ptr: *c_void, req: *uv_fs_t, path: *c_char,
cb: *u8) -> c_int;
fn rust_uv_fs_write(loop_ptr: *c_void, req: *uv_fs_t, fd: c_int,
buf: *c_void, len: c_uint, offset: i64, cb: *u8) -> c_int;
fn rust_uv_fs_read(loop_ptr: *c_void, req: *uv_fs_t, fd: c_int,
buf: *c_void, len: c_uint, offset: i64, cb: *u8) -> c_int;
fn rust_uv_fs_close(loop_ptr: *c_void, req: *uv_fs_t, fd: c_int,
cb: *u8) -> c_int;
fn rust_uv_fs_stat(loop_ptr: *c_void, req: *uv_fs_t, path: *c_char, cb: *u8) -> c_int;
fn rust_uv_fs_fstat(loop_ptr: *c_void, req: *uv_fs_t, fd: c_int, cb: *u8) -> c_int;
fn rust_uv_fs_mkdir(loop_ptr: *c_void, req: *uv_fs_t, path: *c_char,
mode: c_int, cb: *u8) -> c_int;
fn rust_uv_fs_rmdir(loop_ptr: *c_void, req: *uv_fs_t, path: *c_char,
cb: *u8) -> c_int;
fn rust_uv_fs_readdir(loop_ptr: *c_void, req: *uv_fs_t, path: *c_char,
flags: c_int, cb: *u8) -> c_int;
fn rust_uv_fs_req_cleanup(req: *uv_fs_t);
fn rust_uv_populate_uv_stat(req_in: *uv_fs_t, stat_out: *uv_stat_t);
fn rust_uv_get_result_from_fs_req(req: *uv_fs_t) -> c_int;
fn rust_uv_get_ptr_from_fs_req(req: *uv_fs_t) -> *libc::c_void;
fn rust_uv_get_loop_from_fs_req(req: *uv_fs_t) -> *uv_loop_t;
fn rust_uv_get_loop_from_getaddrinfo_req(req: *uv_fs_t) -> *uv_loop_t;
fn rust_uv_get_stream_handle_from_connect_req(connect_req: *uv_connect_t) -> *uv_stream_t;
fn rust_uv_get_stream_handle_from_write_req(write_req: *uv_write_t) -> *uv_stream_t;
fn rust_uv_get_loop_for_uv_handle(handle: *c_void) -> *c_void;
fn rust_uv_get_data_for_uv_loop(loop_ptr: *c_void) -> *c_void;
fn rust_uv_set_data_for_uv_loop(loop_ptr: *c_void, data: *c_void);
fn rust_uv_get_data_for_uv_handle(handle: *c_void) -> *c_void;
fn rust_uv_set_data_for_uv_handle(handle: *c_void, data: *c_void);
fn rust_uv_get_data_for_req(req: *c_void) -> *c_void;
fn rust_uv_set_data_for_req(req: *c_void, data: *c_void);
fn rust_uv_get_base_from_buf(buf: uv_buf_t) -> *u8;
fn rust_uv_get_len_from_buf(buf: uv_buf_t) -> size_t;
fn rust_uv_getaddrinfo(loop_: *uv_loop_t, req: *uv_getaddrinfo_t,
getaddrinfo_cb: uv_getaddrinfo_cb,
node: *c_char, service: *c_char,
hints: *addrinfo) -> c_int;
fn rust_uv_freeaddrinfo(ai: *addrinfo);
fn rust_uv_spawn(loop_ptr: *c_void, outptr: *uv_process_t,
options: uv_process_options_t) -> c_int;
fn rust_uv_process_kill(p: *uv_process_t, signum: c_int) -> c_int;
fn rust_uv_process_pid(p: *uv_process_t) -> c_int;
fn rust_set_stdio_container_flags(c: *uv_stdio_container_t, flags: c_int);
fn rust_set_stdio_container_fd(c: *uv_stdio_container_t, fd: c_int);
fn rust_set_stdio_container_stream(c: *uv_stdio_container_t,
stream: *uv_stream_t);
fn rust_uv_pipe_init(loop_ptr: *c_void, p: *uv_pipe_t, ipc: c_int) -> c_int;
} | |
index.js | /**
* @typedef {import("@typings/index").ApiRequest} ApiRequest
* @typedef {import("@typings/arc").SpotifySession} SpotifySession
*/
const { http } = require("@architect/functions");
const { init, refresh } = require("./session");
/** @type {ApiRequest} */
const auth = async (req) => {
if (req.query.code) {
/** @type {SpotifySession | { error: string }} */
let account;
| return {
statusCode: err.code,
body: err.message,
};
}
return {
session: { ...account },
location: "/",
};
}
if (req.query.refreshUrl) {
/** @type {string | { error: string }} */
let accessToken;
try {
accessToken = await refresh(req.session.refreshToken);
return {
session: { ...req.session, accessToken },
location: req.query.refreshUrl,
};
} catch (err) {
return {
statusCode: err.code,
body: err.message,
};
}
}
return {
location: "/",
};
};
module.exports = {
handler: http.async(auth),
}; | try {
account = await init(req.query.code);
} catch (err) { |
Container.test.js | import React from "react"; |
it("renders without crashing", () => {
const div = document.createElement("div");
render(<Container />, div);
}); | import { render } from "@testing-library/react";
import { shallow } from "enzyme";
import Container from "../Container"; |
lie_algebras.py | r"""
Lie Algebras
AUTHORS:
- Travis Scrimshaw (07-15-2013): Initial implementation
"""
#*****************************************************************************
# Copyright (C) 2013 Travis Scrimshaw <tscrim at ucdavis.edu>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# http://www.gnu.org/licenses/
#*****************************************************************************
from sage.misc.abstract_method import abstract_method
from sage.misc.cachefunc import cached_method
from sage.misc.lazy_attribute import lazy_attribute
from sage.misc.lazy_import import LazyImport
from sage.categories.category import JoinCategory, Category
from sage.categories.category_types import Category_over_base_ring
from sage.categories.category_with_axiom import CategoryWithAxiom_over_base_ring
from sage.categories.finite_enumerated_sets import FiniteEnumeratedSets
from sage.categories.modules import Modules
from sage.categories.sets_cat import Sets
from sage.categories.homset import Hom
from sage.categories.morphism import Morphism
from sage.structure.element import coerce_binop
class LieAlgebras(Category_over_base_ring):
"""
The category of Lie algebras.
EXAMPLES::
sage: C = LieAlgebras(QQ); C
Category of Lie algebras over Rational Field
sage: sorted(C.super_categories(), key=str)
[Category of vector spaces over Rational Field]
We construct a typical parent in this category, and do some
computations with it::
sage: A = C.example(); A
An example of a Lie algebra: the Lie algebra from the associative
algebra Symmetric group algebra of order 3 over Rational Field
generated by ([2, 1, 3], [2, 3, 1])
sage: A.category()
Category of Lie algebras over Rational Field
sage: A.base_ring()
Rational Field
sage: a,b = A.lie_algebra_generators()
sage: a.bracket(b)
-[1, 3, 2] + [3, 2, 1]
sage: b.bracket(2*a + b)
2*[1, 3, 2] - 2*[3, 2, 1]
sage: A.bracket(a, b)
-[1, 3, 2] + [3, 2, 1]
Please see the source code of `A` (with ``A??``) for how to
implement other Lie algebras.
TESTS::
sage: C = LieAlgebras(QQ)
sage: TestSuite(C).run()
sage: TestSuite(C.example()).run()
.. TODO::
Many of these tests should use Lie algebras that are not the minimal
example and need to be added after :trac:`16820` (and :trac:`16823`).
"""
@cached_method
def super_categories(self):
"""
EXAMPLES::
sage: LieAlgebras(QQ).super_categories()
[Category of vector spaces over Rational Field]
"""
# We do not also derive from (Magmatic) algebras since we don't want *
# to be our Lie bracket
# Also this doesn't inherit the ability to add axioms like Associative
# and Unital, both of which do not make sense for Lie algebras
return [Modules(self.base_ring())]
# TODO: Find some way to do this without copying most of the logic.
def _repr_object_names(self):
r"""
Return the name of the objects of this category.
.. SEEALSO:: :meth:`Category._repr_object_names`
EXAMPLES::
sage: LieAlgebras(QQ)._repr_object_names()
'Lie algebras over Rational Field'
sage: LieAlgebras(Fields())._repr_object_names()
'Lie algebras over fields'
sage: from sage.categories.category import JoinCategory
sage: from sage.categories.category_with_axiom import Blahs
sage: LieAlgebras(JoinCategory((Blahs().Flying(), Fields())))
Category of Lie algebras over (flying unital blahs and fields)
"""
base = self.base()
if isinstance(base, Category):
if isinstance(base, JoinCategory):
name = '('+' and '.join(C._repr_object_names() for C in base.super_categories())+')'
else:
name = base._repr_object_names()
else:
name = base
return "Lie algebras over {}".format(name)
def example(self, gens=None):
"""
Return an example of a Lie algebra as per
:meth:`Category.example <sage.categories.category.Category.example>`.
EXAMPLES::
sage: LieAlgebras(QQ).example()
An example of a Lie algebra: the Lie algebra from the associative algebra
Symmetric group algebra of order 3 over Rational Field
generated by ([2, 1, 3], [2, 3, 1])
Another set of generators can be specified as an optional argument::
sage: F.<x,y,z> = FreeAlgebra(QQ)
sage: LieAlgebras(QQ).example(F.gens())
An example of a Lie algebra: the Lie algebra from the associative algebra
Free Algebra on 3 generators (x, y, z) over Rational Field
generated by (x, y, z)
"""
if gens is None:
from sage.combinat.symmetric_group_algebra import SymmetricGroupAlgebra
from sage.rings.all import QQ
gens = SymmetricGroupAlgebra(QQ, 3).algebra_generators()
from sage.categories.examples.lie_algebras import Example
return Example(gens)
WithBasis = LazyImport('sage.categories.lie_algebras_with_basis',
'LieAlgebrasWithBasis', as_name='WithBasis')
class FiniteDimensional(CategoryWithAxiom_over_base_ring):
WithBasis = LazyImport('sage.categories.finite_dimensional_lie_algebras_with_basis',
'FiniteDimensionalLieAlgebrasWithBasis', as_name='WithBasis')
def extra_super_categories(self):
"""
Implements the fact that a finite dimensional Lie algebra over
a finite ring is finite.
EXAMPLES::
sage: LieAlgebras(IntegerModRing(4)).FiniteDimensional().extra_super_categories()
[Category of finite sets]
sage: LieAlgebras(ZZ).FiniteDimensional().extra_super_categories()
[]
sage: LieAlgebras(GF(5)).FiniteDimensional().is_subcategory(Sets().Finite())
True
sage: LieAlgebras(ZZ).FiniteDimensional().is_subcategory(Sets().Finite())
False
sage: LieAlgebras(GF(5)).WithBasis().FiniteDimensional().is_subcategory(Sets().Finite())
True
"""
if self.base_ring() in Sets().Finite():
return [Sets().Finite()]
return []
class ParentMethods:
#@abstract_method
#def lie_algebra_generators(self):
# """
# Return the generators of ``self`` as a Lie algebra.
# """
# TODO: Move this to LieAlgebraElement, cythonize, and use more standard
# coercion framework test (i.e., have_same_parent)
def bracket(self, lhs, rhs):
"""
Return the Lie bracket ``[lhs, rhs]`` after coercing ``lhs`` and
``rhs`` into elements of ``self``.
EXAMPLES::
sage: L = LieAlgebras(QQ).example()
sage: x,y = L.lie_algebra_generators()
sage: L.bracket(x, x + y)
-[1, 3, 2] + [3, 2, 1]
sage: L.bracket(x, 0)
0
sage: L.bracket(0, x)
0
"""
return self(lhs)._bracket_(self(rhs))
# Do not override this. Instead implement :meth:`_construct_UEA`;
# then, :meth:`lift` and :meth:`universal_enveloping_algebra`
# will automatically setup the coercion.
def universal_enveloping_algebra(self):
"""
Return the universal enveloping algebra of ``self``.
EXAMPLES::
sage: L = LieAlgebras(QQ).FiniteDimensional().WithBasis().example()
sage: L.universal_enveloping_algebra()
Noncommutative Multivariate Polynomial Ring in b0, b1, b2
over Rational Field, nc-relations: {}
::
sage: L = LieAlgebra(QQ, 3, 'x', abelian=True)
sage: L.universal_enveloping_algebra()
Multivariate Polynomial Ring in x0, x1, x2 over Rational Field
.. SEEALSO::
:meth:`lift`
"""
return self.lift.codomain()
@abstract_method(optional=True)
def _construct_UEA(self):
"""
Return the universal enveloping algebra of ``self``.
Unlike :meth:`universal_enveloping_algebra`, this method does not
(usually) construct the canonical lift morphism from ``self``
to the universal enveloping algebra (let alone register it
as a coercion).
One should implement this method and the ``lift`` method for
the element class to construct the morphism the universal
enveloping algebra.
EXAMPLES::
sage: L = LieAlgebras(QQ).FiniteDimensional().WithBasis().example()
sage: L._construct_UEA()
Noncommutative Multivariate Polynomial Ring in b0, b1, b2
over Rational Field, nc-relations: {}
::
sage: L = LieAlgebra(QQ, 3, 'x', abelian=True)
sage: L.universal_enveloping_algebra() # indirect doctest
Multivariate Polynomial Ring in x0, x1, x2 over Rational Field
"""
@abstract_method(optional=True)
def module(self):
r"""
Return an `R`-module which is isomorphic to the
underlying `R`-module of ``self``.
The rationale behind this method is to enable linear
algebraic functionality on ``self`` (such as
computing the span of a list of vectors in ``self``)
via an isomorphism from ``self`` to an `R`-module
(typically, although not always, an `R`-module of
the form `R^n` for an `n \in \NN`) on which such
functionality already exists. For this method to be
of any use, it should return an `R`-module which has
linear algebraic functionality that ``self`` does
not have.
For instance, if ``self`` has ordered basis
`(e, f, h)`, then ``self.module()`` will be the
`R`-module `R^3`, and the elements `e`, `f` and
`h` of ``self`` will correspond to the basis
vectors `(1, 0, 0)`, `(0, 1, 0)` and `(0, 0, 1)`
of ``self.module()``.
This method :meth:`module` needs to be set whenever
a finite-dimensional Lie algebra with basis is
intended to support linear algebra (which is, e.g.,
used in the computation of centralizers and lower
central series). One then needs to also implement
the `R`-module isomorphism from ``self`` to
``self.module()`` in both directions; that is,
implement:
* a ``to_vector`` ElementMethod which sends every
element of ``self`` to the corresponding element of
``self.module()``;
* a ``from_vector`` ParentMethod which sends every
element of ``self.module()`` to an element
of ``self``.
The ``from_vector`` method will automatically serve
as an element constructor of ``self`` (that is,
``self(v)`` for any ``v`` in ``self.module()`` will
return ``self.from_vector(v)``).
.. TODO::
Ensure that this is actually so.
EXAMPLES::
sage: L = LieAlgebras(QQ).FiniteDimensional().WithBasis().example()
sage: L.module()
Vector space of dimension 3 over Rational Field
"""
@abstract_method(optional=True)
def from_vector(self, v):
    """
    Return the element of ``self`` corresponding to the
    vector ``v`` in ``self.module()``.

    Implement this if you implement :meth:`module`; see the
    documentation of the latter for how this is to be done.

    EXAMPLES::

        sage: L = LieAlgebras(QQ).FiniteDimensional().WithBasis().example()
        sage: u = L.from_vector(vector(QQ, (1, 0, 0))); u
        (1, 0, 0)
        sage: parent(u) is L
        True
    """
@lazy_attribute
def lift(self):
    r"""
    Construct the lift morphism from ``self`` to the universal
    enveloping algebra of ``self`` (the latter is implemented
    as :meth:`universal_enveloping_algebra`).

    This is a Lie algebra homomorphism. It is injective if
    ``self`` is a free module over its base ring, or if the
    base ring is a `\QQ`-algebra.

    EXAMPLES::

        sage: L = LieAlgebras(QQ).FiniteDimensional().WithBasis().example()
        sage: a, b, c = L.lie_algebra_generators()
        sage: lifted = L.lift(2*a + b - c); lifted
        2*b0 + b1 - b2
        sage: lifted.parent() is L.universal_enveloping_algebra()
        True
    """
    # Registering the morphism as a coercion lets arithmetic in the
    # universal enveloping algebra absorb Lie algebra elements
    # automatically.
    M = LiftMorphism(self, self._construct_UEA())
    M.register_as_coercion()
    return M
def subalgebra(self, gens, names=None, index_set=None, category=None):
    r"""
    Return the subalgebra of ``self`` generated by ``gens``.

    EXAMPLES::

        sage: L = LieAlgebras(QQ).FiniteDimensional().WithBasis().example()
        sage: a, b, c = L.lie_algebra_generators()
        sage: L.subalgebra([2*a - c, b + c])
        An example of a finite dimensional Lie algebra with basis:
        the 2-dimensional abelian Lie algebra over Rational Field
        with basis matrix:
        [   1    0 -1/2]
        [   0    1    1]

    ::

        sage: L = LieAlgebras(QQ).example()
        sage: x,y = L.lie_algebra_generators()
        sage: L.subalgebra([x + y])
        Traceback (most recent call last):
        ...
        NotImplementedError: subalgebras not yet implemented: see #17416
    """
    # Generic subalgebras are not implemented in the category yet;
    # concrete parents (e.g. finite-dimensional-with-basis) override this.
    raise NotImplementedError("subalgebras not yet implemented: see #17416")
    #from sage.algebras.lie_algebras.subalgebra import LieSubalgebra
    #return LieSubalgebra(gens, names, index_set, category)
def ideal(self, gens, names=None, index_set=None, category=None):
    r"""
    Return the ideal of ``self`` generated by ``gens``.

    EXAMPLES::

        sage: L = LieAlgebras(QQ).FiniteDimensional().WithBasis().example()
        sage: a, b, c = L.lie_algebra_generators()
        sage: L.ideal([2*a - c, b + c])
        An example of a finite dimensional Lie algebra with basis:
        the 2-dimensional abelian Lie algebra over Rational Field
        with basis matrix:
        [   1    0 -1/2]
        [   0    1    1]

    ::

        sage: L = LieAlgebras(QQ).example()
        sage: x,y = L.lie_algebra_generators()
        sage: L.ideal([x + y])
        Traceback (most recent call last):
        ...
        NotImplementedError: ideals not yet implemented: see #16824
    """
    # Generic ideals are not implemented in the category yet; concrete
    # parents override this.
    raise NotImplementedError("ideals not yet implemented: see #16824")
    #from sage.algebras.lie_algebras.ideal import LieIdeal
    #return LieIdeal(gens, names, index_set, category)
def is_ideal(self, A):
    """
    Check whether ``self`` is an ideal of ``A``.

    EXAMPLES::

        sage: L = LieAlgebras(QQ).example()
        sage: L.is_ideal(L)
        True
    """
    # Every Lie algebra is trivially an ideal of itself.
    if self == A:
        return True
    # The proper-subset case is not implemented yet; see trac #16824.
    raise NotImplementedError("ideals not yet implemented: see #16824")
    #from sage.algebras.lie_algebras.ideal import LieIdeal
    #return isinstance(self, LieIdeal) and self._ambient is A
@abstract_method(optional=True)
def killing_form(self, x, y):
    """
    Return the Killing form of ``x`` and ``y``.

    EXAMPLES::

        sage: L = LieAlgebras(QQ).FiniteDimensional().WithBasis().example()
        sage: a, b, c = L.lie_algebra_generators()
        sage: L.killing_form(a, b+c)
        0
    """
def is_abelian(self):
    r"""
    Return ``True`` if this Lie algebra is abelian.

    A Lie algebra `\mathfrak{g}` is abelian if `[x, y] = 0` for all
    `x, y \in \mathfrak{g}`.

    EXAMPLES::

        sage: L = LieAlgebras(QQ).example()
        sage: L.is_abelian()
        False
        sage: R = QQ['x,y']
        sage: L = LieAlgebras(QQ).example(R.gens())
        sage: L.is_abelian()
        True

    ::

        sage: L.<x> = LieAlgebra(QQ,1) # todo: not implemented - #16823
        sage: L.is_abelian() # todo: not implemented - #16823
        True
        sage: L.<x,y> = LieAlgebra(QQ,2) # todo: not implemented - #16823
        sage: L.is_abelian() # todo: not implemented - #16823
        False
    """
    G = self.lie_algebra_generators()
    # Brute-force pairwise checking only terminates for a finite
    # generating set.  (Restores the line lost to extraction damage.)
    if G not in FiniteEnumeratedSets():
        raise NotImplementedError("infinite number of generators")
    zero = self.zero()
    # The bracket is bilinear, so vanishing on generator pairs
    # suffices.
    return all(x._bracket_(y) == zero for x in G for y in G)
def is_commutative(self):
    """
    Return whether ``self`` is commutative; for a Lie algebra this
    is the same thing as being abelian.

    EXAMPLES::

        sage: L = LieAlgebras(QQ).example()
        sage: L.is_commutative()
        False

    ::

        sage: L.<x> = LieAlgebra(QQ, 1) # todo: not implemented - #16823
        sage: L.is_commutative() # todo: not implemented - #16823
        True
    """
    # Commutativity coincides with abelian-ness, so just delegate.
    abelian = self.is_abelian()
    return abelian
@abstract_method(optional=True)
def is_solvable(self):
    """
    Return if ``self`` is a solvable Lie algebra.

    EXAMPLES::

        sage: L = LieAlgebras(QQ).FiniteDimensional().WithBasis().example()
        sage: L.is_solvable()
        True
    """
@abstract_method(optional=True)
def is_nilpotent(self):
    """
    Return if ``self`` is a nilpotent Lie algebra.

    EXAMPLES::

        sage: L = LieAlgebras(QQ).FiniteDimensional().WithBasis().example()
        sage: L.is_nilpotent()
        True
    """
def _test_jacobi_identity(self, **options):
"""
Test that the Jacobi identity is satisfied on (not
necessarily all) elements of this set.
INPUT:
- ``options`` -- any keyword arguments accepted by :meth:`_tester`.
EXAMPLES:
By default, this method runs the tests only on the
elements returned by ``self.some_elements()``::
sage: L = LieAlgebras(QQ).example()
sage: L._test_jacobi_identity()
However, the elements tested can be customized with the
``elements`` keyword argument::
sage: L = LieAlgebras(QQ).example()
sage: x,y = L.lie_algebra_generators()
sage: L._test_jacobi_identity(elements=[x+y, x, 2*y, x.bracket(y)])
See the documentation for :class:`TestSuite` for more information.
"""
tester = self._tester(**options)
elts = tester.some_elements()
jacobi = lambda x, y, z: self.bracket(x, self.bracket(y, z)) + \
self.bracket(y, self.bracket(z, x)) + \
self.bracket(z, self.bracket(x, y))
zero = self.zero()
for x in elts:
for y in elts:
if x == y:
continue
for z in elts:
tester.assertTrue(jacobi(x, y, z) == zero)
def _test_antisymmetry(self, **options):
"""
Test that the antisymmetry axiom is satisfied on (not
necessarily all) elements of this set.
INPUT:
- ``options`` -- any keyword arguments accepted by :meth:`_tester`.
EXAMPLES:
By default, this method runs the tests only on the
elements returned by ``self.some_elements()``::
sage: L = LieAlgebras(QQ).example()
sage: L._test_antisymmetry()
However, the elements tested can be customized with the
``elements`` keyword argument::
sage: L = LieAlgebras(QQ).example()
sage: x,y = L.lie_algebra_generators()
sage: L._test_antisymmetry(elements=[x+y, x, 2*y, x.bracket(y)])
See the documentation for :class:`TestSuite` for more information.
"""
tester = self._tester(**options)
elts = tester.some_elements()
zero = self.zero()
for x in elts:
tester.assertTrue(self.bracket(x, x) == zero)
def _test_distributivity(self, **options):
    r"""
    Test the distributivity of the Lie bracket `[,]` on `+` on (not
    necessarily all) elements of this set.

    INPUT:

    - ``options`` -- any keyword arguments accepted by :meth:`_tester`.

    TESTS::

        sage: L = LieAlgebras(QQ).example()
        sage: L._test_distributivity()

    EXAMPLES:

    By default, this method runs the tests only on the
    elements returned by ``self.some_elements()``::

        sage: L = LieAlgebra(QQ, 3, 'x,y,z', representation="polynomial")
        sage: L.some_elements()
        [x + y + z]
        sage: L._test_distributivity()

    However, the elements tested can be customized with the
    ``elements`` keyword argument::

        sage: L = LieAlgebra(QQ, cartan_type=['A', 2]) # todo: not implemented - #16821
        sage: h1 = L.gen(0) # todo: not implemented - #16821
        sage: h2 = L.gen(1) # todo: not implemented - #16821
        sage: e2 = L.gen(3) # todo: not implemented - #16821
        sage: L._test_distributivity(elements=[h1, h2, e2]) # todo: not implemented - #16821

    See the documentation for :class:`TestSuite` for more information.
    """
    tester = self._tester(**options)
    S = tester.some_elements()
    # Sample triples (bounded by tester._max_runs) rather than the
    # full Cartesian cube, to keep the test fast.
    from sage.misc.misc import some_tuples
    for x,y,z in some_tuples(S, 3, tester._max_runs):
        # left distributivity
        tester.assertTrue(self.bracket(x, (y + z))
                          == self.bracket(x, y) + self.bracket(x, z))
        # right distributivity
        tester.assertTrue(self.bracket((x + y), z)
                          == self.bracket(x, z) + self.bracket(y, z))
class ElementMethods:
    @coerce_binop
    def bracket(self, rhs):
        """
        Return the Lie bracket ``[self, rhs]``.

        EXAMPLES::

            sage: L = LieAlgebras(QQ).example()
            sage: x,y = L.lie_algebra_generators()
            sage: x.bracket(y)
            -[1, 3, 2] + [3, 2, 1]
            sage: x.bracket(0)
            0
        """
        # @coerce_binop has already put self and rhs in a common parent.
        return self._bracket_(rhs)

    # Implement this method to define the Lie bracket. You do not
    # need to deal with the coercions here.
    @abstract_method
    def _bracket_(self, y):
        """
        Return the Lie bracket ``[self, y]``, where ``y`` is an
        element of the same Lie algebra as ``self``.

        EXAMPLES::

            sage: L = LieAlgebras(QQ).example()
            sage: x,y = L.lie_algebra_generators()
            sage: x._bracket_(y)
            -[1, 3, 2] + [3, 2, 1]
            sage: y._bracket_(x)
            [1, 3, 2] - [3, 2, 1]
            sage: x._bracket_(x)
            0
        """

    @abstract_method(optional=True)
    def to_vector(self):
        """
        Return the vector in ``g.module()`` corresponding to the
        element ``self`` of ``g`` (where ``g`` is the parent of
        ``self``).

        Implement this if you implement ``g.module()``.
        See :meth:`LieAlgebras.module` for how this is to be done.

        EXAMPLES::

            sage: L = LieAlgebras(QQ).FiniteDimensional().WithBasis().example()
            sage: u = L((1, 0, 0)).to_vector(); u
            (1, 0, 0)
            sage: parent(u)
            Vector space of dimension 3 over Rational Field
        """

    @abstract_method(optional=True)
    def lift(self):
        """
        Return the image of ``self`` under the canonical lift from the Lie
        algebra to its universal enveloping algebra.

        EXAMPLES::

            sage: L = LieAlgebras(QQ).FiniteDimensional().WithBasis().example()
            sage: a, b, c = L.lie_algebra_generators()
            sage: elt = 3*a + b - c
            sage: elt.lift()
            3*b0 + b1 - b2

        ::

            sage: L.<x,y> = LieAlgebra(QQ, abelian=True)
            sage: x.lift()
            x
        """

    def killing_form(self, x):
        """
        Return the Killing form of ``self`` and ``x``.

        EXAMPLES::

            sage: L = LieAlgebras(QQ).FiniteDimensional().WithBasis().example()
            sage: a, b, c = L.lie_algebra_generators()
            sage: a.killing_form(b)
            0
        """
        # Delegate to the parent, which owns the bilinear form.
        return self.parent().killing_form(self, x)
class LiftMorphism(Morphism):
    """
    The natural lifting morphism from a Lie algebra to its
    enveloping algebra.
    """
    def __init__(self, domain, codomain):
        """
        Initialize ``self``.

        EXAMPLES::

            sage: L = LieAlgebras(QQ).FiniteDimensional().WithBasis().example()
            sage: f = L.lift

        We skip the category test since this is currently not an element of
        a homspace::

            sage: TestSuite(f).run(skip="_test_category")
        """
        Morphism.__init__(self, Hom(domain, codomain))

    def _call_(self, x):
        """
        Lift ``x`` to the universal enveloping algebra.

        EXAMPLES::

            sage: L = LieAlgebras(QQ).FiniteDimensional().WithBasis().example()
            sage: a, b, c = L.lie_algebra_generators()
            sage: L.lift(3*a + b - c)
            3*b0 + b1 - b2
        """
        # Each element knows how to lift itself; the morphism just
        # dispatches.
        return x.lift()
Merge_Two_Sorted_Lists_Solution_2.py |
# Space: O(1)
# Time: O(n)
# Iterative approach
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
    def mergeTwoLists(self, l1, l2):
        """
        Merge two sorted singly-linked lists and return the head of the
        merged sorted list.

        Runs in O(n + m) time and O(1) extra space by splicing the
        existing nodes instead of allocating new ones.
        """
        # Dummy head avoids special-casing the first node; the real
        # result starts at dummy.next.
        dummy = ListNode(0)
        tail = dummy
        cur1, cur2 = l1, l2
        while cur1 and cur2:
            if cur1.val < cur2.val:
                tail.next = cur1
                cur1 = cur1.next
            else:
                tail.next = cur2
                cur2 = cur2.next
            # Advance once per iteration (the original duplicated this
            # line in both branches).
            tail = tail.next
        # At most one list still has nodes; splice it on wholesale.
        tail.next = cur1 if cur1 else cur2
        return dummy.next
|
point_3.rs | use crate::vec3::Vec3;
use std::ops::{Deref, DerefMut};
/// A point in 3D space, represented as a thin newtype over `Vec3`.
#[derive(Clone, Copy, Debug, Default, PartialEq)]
pub struct Point3(Vec3);
// Let a `Point3` be used anywhere a `&Vec3` is expected.
impl Deref for Point3 {
    type Target = Vec3;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl DerefMut for Point3 {
fn | (&mut self) -> &mut Self::Target {
&mut self.0
}
}
| deref_mut |
server.go | // Copyright (c) 2017 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package main
import (
"github.com/uber/cadence/common"
"github.com/uber/cadence/common/service"
"github.com/uber/cadence/common/service/config"
"github.com/uber/cadence/service/frontend"
"github.com/uber/cadence/service/history"
"github.com/uber/cadence/service/matching"
"log"
"time"
)
type (
	// server is a daemon hosting a single cadence service
	// (frontend, history, or matching) described by cfg.
	server struct {
		name   string         // key into cfg.Services identifying the service
		cfg    *config.Config // full cadence configuration
		doneC  chan struct{}  // receives a signal when the daemon exits
		daemon common.Daemon  // running service; nil until Start is called
	}
)
// Service names recognized by startService.
const (
	frontendService = "frontend"
	historyService  = "history"
	matchingService = "matching"
)
// newServer returns a new instance of a daemon
// that represents a cadence service.
// The parameter is named svcName (not service) so it does not shadow
// the imported `service` package.
func newServer(svcName string, cfg *config.Config) common.Daemon {
	return &server{
		cfg:   cfg,
		name:  svcName,
		doneC: make(chan struct{}),
	}
}
// Start starts the server, aborting the process if the named
// service has no entry in the configuration.
func (s *server) Start() {
	if _, ok := s.cfg.Services[s.name]; !ok {
		// Log the service name; the original passed the whole struct
		// to %v, printing the entire server value instead of the name.
		log.Fatalf("`%v` service missing config", s.name)
	}
	s.daemon = s.startService()
}
// Stop stops the server
func (s *server) Stop() {
if s.daemon == nil {
return
}
select {
case <-s.doneC:
default:
s.daemon.Stop()
select { | }
}
}
// startService starts a service with the given name and config
func (s *server) startService() common.Daemon {
var err error
params := service.BootstrapParams{}
params.Name = "cadence-" + s.name
params.Logger = s.cfg.Log.NewBarkLogger()
params.CassandraConfig = s.cfg.Cassandra
params.RingpopFactory, err = s.cfg.Ringpop.NewFactory()
if err != nil {
log.Fatalf("error creating ringpop factory: %v", err)
}
svcCfg := s.cfg.Services[s.name]
params.MetricScope = svcCfg.Metrics.NewScope()
params.TChannelFactory = svcCfg.TChannel.NewFactory()
var daemon common.Daemon
switch s.name {
case frontendService:
daemon = frontend.NewService(¶ms)
case historyService:
daemon = history.NewService(¶ms)
case matchingService:
daemon = matching.NewService(¶ms)
}
go execute(daemon, s.doneC)
return daemon
}
// execute runs the daemon in a separate go routine
func execute(d common.Daemon, doneC chan struct{}) {
d.Start()
doneC <- struct{}{}
} | case <-s.doneC:
case <-time.After(time.Minute):
log.Printf("timed out waiting for server %v to exit\n", s.name) |
tekton.go | package kabaneroplatform
import (
"context"
"fmt"
"strings"
kabanerov1alpha2 "github.com/kabanero-io/kabanero-operator/pkg/apis/kabanero/v1alpha2"
tektoncdv1alpha1 "github.com/tektoncd/operator/pkg/apis/operator/v1alpha1"
"sigs.k8s.io/controller-runtime/pkg/client"
)
// Retrieves the Tekton instance status.
func getTektonStatus(k *kabanerov1alpha2.Kabanero, c client.Client) (bool, error) {
k.Status.Tekton.ErrorMessage = ""
k.Status.Tekton.Ready = "False"
// Get the tekton instance.
tektonInstName := "cluster"
tekton := &tektoncdv1alpha1.Config{}
err := c.Get(context.TODO(), client.ObjectKey{
Name: tektonInstName}, tekton)
if err != nil |
// Starting with version 0.5.*, the first condition in the list is the one that matters.
// The state of an installation can be: installing, installed, or error.
ready := false
readyCondition := tekton.Status.Conditions[0]
k.Status.Tekton.Version = readyCondition.Version
code := strings.ToLower(string(readyCondition.Code))
if code == "error" {
k.Status.Tekton.ErrorMessage = readyCondition.Details
} else if code == "installed" {
ready = true
k.Status.Tekton.Ready = "True"
}
return ready, err
}
| {
message := "Tekton instance with the name of " + tektonInstName + " could not be found."
k.Status.Tekton.ErrorMessage = message
fmt.Println("Error while assessing Tekton readiness. Unable to add tekton scheme.", err)
return false, err
} |
Join.ts | import { Subscription } from 'suub';
import { Value, Callback, ExtractValues, SYNCLOCK } from '../types';
import { mergeClocks } from '../Clock';
export function join<T extends { [key: string]: Value<any> }>(deps: T): Value<ExtractValues<T>> {
const clock = mergeClocks(...Array.from(Object.values(deps)).map(v => v[SYNCLOCK]));
const tick = 0;
const sub = Subscription<ExtractValues<T>>() as Subscription<ExtractValues<T>>;
let state: ExtractValues<T> = Object.keys(deps).reduce<ExtractValues<T>>((acc, key) => {
if (!deps[key]) {
console.warn('Error in ', key);
}
(acc as any)[key] = deps[key].get();
return acc;
}, {} as any);
let nextState = state;
let destroyed = false; | return;
}
if (nextState !== state) {
state = nextState;
sub.emit(state);
}
});
const unsubParents = subDeps();
function subDeps(): Callback {
let unsubs: Array<Callback> = [];
Object.keys(deps).forEach(key => {
unsubs.push(
deps[key].sub(
value => {
if (nextState[key] !== value) {
nextState = {
...nextState,
[key]: value
};
}
},
() => {
destroy();
}
)
);
});
return () => {
unsubs.forEach(unsub => {
unsub();
});
};
}
function destroy() {
if (destroyed) {
return;
}
unsubClock();
sub.unsubscribeAll();
unsubParents();
}
return {
[SYNCLOCK]: { tick, clock },
get: () => state,
sub: (cb, onUnsub) => {
if (destroyed) {
throw new Error('Destroyed');
}
return sub.subscribe(cb, onUnsub);
},
destroy
};
} |
const unsubClock = clock.subscribe(tick, () => {
if (destroyed) { |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.