file_name (3-137 chars) | prefix (0-918k chars) | suffix (0-962k chars) | middle (0-812k chars)
---|---|---|---
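The columns above describe a fill-in-the-middle (FIM) layout: each row stores a file name, the text before a masked span (prefix), the text after it (suffix), and the masked span itself (middle); in the rows below the mask position appears as a bare `|`. As a minimal sketch of how such a row could be derived from a source file (the helper name and span arguments are illustrative, not part of this dataset):

    def make_fim_row(file_name, text, start, end):
        """Split text into prefix/middle/suffix around the character span [start, end)."""
        return {
            "file_name": file_name,
            "prefix": text[:start],
            "middle": text[start:end],
            "suffix": text[end:],
        }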
area_service.py | """
byceps.services.seating.area_service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2014-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from __future__ import annotations
from typing import Optional
from sqlalchemy import select
from sqlalchemy.sql import Select
from ...database import db, paginate, Pagination
from ...typing import PartyID
from ..ticketing.dbmodels.ticket import Ticket as DbTicket
from .dbmodels.area import Area as DbArea
from .dbmodels.seat import Seat as DbSeat
from .transfer.models import Area, SeatUtilization
def create_area(party_id: PartyID, slug: str, title: str) -> Area:
"""Create an area."""
area = DbArea(party_id, slug, title)
db.session.add(area)
db.session.commit()
return _db_entity_to_area(area)
def delete_area(area_id: str) -> None:
"""Delete an area."""
db.session.query(DbArea) \
.filter_by(id=area_id) \
.delete()
db.session.commit()
def count_areas_for_party(party_id: PartyID) -> int:
"""Return the number of seating areas for that party."""
return db.session \
.query(DbArea) \
.filter_by(party_id=party_id) \
.count()
def | (party_id: PartyID, slug: str) -> Optional[Area]:
"""Return the area for that party with that slug, or `None` if not found."""
area = db.session \
.query(DbArea) \
.filter_by(party_id=party_id) \
.filter_by(slug=slug) \
.first()
if area is None:
return None
return _db_entity_to_area(area)
def get_areas_with_seat_utilization(
party_id: PartyID,
) -> list[tuple[Area, SeatUtilization]]:
"""Return all areas and their seat utilization for that party."""
query = _get_areas_with_seat_utilization_query(party_id)
rows = db.session.execute(query).all()
return [_map_areas_with_seat_utilization_row(row) for row in rows]
def get_areas_with_seat_utilization_paginated(
party_id: PartyID, page: int, per_page: int
) -> Pagination:
"""Return areas and their seat utilization for that party, paginated."""
items_query = _get_areas_with_seat_utilization_query(party_id)
count_query = select(db.func.count(DbArea.id)) \
.filter(DbArea.party_id == party_id)
return paginate(
items_query,
count_query,
page,
per_page,
item_mapper=_map_areas_with_seat_utilization_row,
)
def _get_areas_with_seat_utilization_query(party_id: PartyID) -> Select:
area = db.aliased(DbArea)
subquery_occupied_seat_count = select(db.func.count(DbTicket.id)) \
.filter(DbTicket.revoked == False) \
.filter(DbTicket.occupied_seat_id != None) \
.join(DbSeat) \
.filter(DbSeat.area_id == area.id) \
.scalar_subquery()
subquery_total_seat_count = select(db.func.count(DbSeat.id)) \
.filter_by(area_id=area.id) \
.scalar_subquery()
return select(
area,
subquery_occupied_seat_count,
subquery_total_seat_count,
) \
.filter(area.party_id == party_id) \
.group_by(area.id)
def _map_areas_with_seat_utilization_row(
row: tuple[DbArea, int, int]
) -> tuple[Area, SeatUtilization]:
area, occupied_seat_count, total_seat_count = row
utilization = SeatUtilization(
occupied=occupied_seat_count, total=total_seat_count
)
return _db_entity_to_area(area), utilization
def _db_entity_to_area(area: DbArea) -> Area:
return Area(
id=area.id,
party_id=area.party_id,
slug=area.slug,
title=area.title,
image_filename=area.image_filename,
image_width=area.image_width,
image_height=area.image_height,
)
| find_area_for_party_by_slug |
service.rs | // Copyright 2021, The Tari Project
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
// following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
// disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
// products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
use crate::{
base_node_service::config::BaseNodeServiceConfig,
connectivity_service::{error::WalletConnectivityError, handle::WalletConnectivityRequest, watch::Watch},
};
use core::mem;
use futures::{
channel::{mpsc, oneshot},
stream::Fuse,
StreamExt,
};
use log::*;
use tari_comms::{
connectivity::ConnectivityRequester,
peer_manager::NodeId,
protocol::rpc::{RpcClientLease, RpcClientPool},
};
use tari_core::base_node::{rpc::BaseNodeWalletRpcClient, sync::rpc::BaseNodeSyncRpcClient};
use tokio::time;
const LOG_TARGET: &str = "wallet::connectivity";
/// Connection status of the Base Node
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum OnlineStatus {
Connecting,
Online,
Offline,
}
pub struct WalletConnectivityService {
config: BaseNodeServiceConfig,
request_stream: Fuse<mpsc::Receiver<WalletConnectivityRequest>>,
connectivity: ConnectivityRequester,
base_node_watch: Watch<Option<NodeId>>,
pools: Option<ClientPoolContainer>,
online_status_watch: Watch<OnlineStatus>,
pending_requests: Vec<ReplyOneshot>,
}
struct ClientPoolContainer {
pub base_node_wallet_rpc_client: RpcClientPool<BaseNodeWalletRpcClient>,
pub base_node_sync_rpc_client: RpcClientPool<BaseNodeSyncRpcClient>,
}
impl WalletConnectivityService {
pub(super) fn new(
config: BaseNodeServiceConfig,
request_stream: mpsc::Receiver<WalletConnectivityRequest>,
base_node_watch: Watch<Option<NodeId>>,
online_status_watch: Watch<OnlineStatus>,
connectivity: ConnectivityRequester,
) -> Self {
Self {
config,
request_stream: request_stream.fuse(),
connectivity,
base_node_watch,
pools: None,
pending_requests: Vec::new(),
online_status_watch,
}
}
pub async fn start(mut self) {
debug!(target: LOG_TARGET, "Wallet connectivity service has started.");
let mut base_node_watch_rx = self.base_node_watch.get_receiver().fuse();
loop {
futures::select! {
req = self.request_stream.select_next_some() => {
self.handle_request(req).await;
},
peer = base_node_watch_rx.select_next_some() => {
if let Some(peer) = peer {
// This will block the rest until the connection is established. This is what we want.
self.setup_base_node_connection(peer).await;
}
}
}
}
}
async fn handle_request(&mut self, request: WalletConnectivityRequest) {
use WalletConnectivityRequest::*;
match request {
ObtainBaseNodeWalletRpcClient(reply) => {
self.handle_pool_request(reply.into()).await;
},
ObtainBaseNodeSyncRpcClient(reply) => {
self.handle_pool_request(reply.into()).await;
},
SetBaseNode(peer) => {
self.set_base_node_peer(peer);
},
}
}
async fn handle_pool_request(&mut self, reply: ReplyOneshot) {
use ReplyOneshot::*;
match reply {
WalletRpc(tx) => self.handle_get_base_node_wallet_rpc_client(tx).await,
SyncRpc(tx) => self.handle_get_base_node_sync_rpc_client(tx).await,
}
}
async fn handle_get_base_node_wallet_rpc_client(
&mut self,
reply: oneshot::Sender<RpcClientLease<BaseNodeWalletRpcClient>>,
) {
match self.pools {
Some(ref pools) => match pools.base_node_wallet_rpc_client.get().await {
Ok(client) => {
let _ = reply.send(client);
},
Err(e) => {
warn!(
target: LOG_TARGET,
"Base node connection failed: {}. Reconnecting...", e
);
self.trigger_reconnect();
self.pending_requests.push(reply.into());
},
},
None => {
self.pending_requests.push(reply.into());
if self.base_node_watch.borrow().is_none() {
warn!(
target: LOG_TARGET,
"{} requests are waiting for base node to be set",
self.pending_requests.len()
);
}
},
}
}
async fn handle_get_base_node_sync_rpc_client(
&mut self,
reply: oneshot::Sender<RpcClientLease<BaseNodeSyncRpcClient>>,
) {
match self.pools {
Some(ref pools) => match pools.base_node_sync_rpc_client.get().await {
Ok(client) => {
let _ = reply.send(client);
},
Err(e) => {
warn!(
target: LOG_TARGET,
"Base node connection failed: {}. Reconnecting...", e
);
self.trigger_reconnect();
self.pending_requests.push(reply.into());
},
},
None => {
self.pending_requests.push(reply.into());
if self.base_node_watch.borrow().is_none() {
warn!(
target: LOG_TARGET,
"{} requests are waiting for base node to be set",
self.pending_requests.len()
);
}
},
}
}
fn trigger_reconnect(&mut self) {
let peer = self
.base_node_watch
.borrow()
.clone()
.expect("trigger_reconnect called before base node is set");
// Trigger the watch so that a peer connection is reinitiated
self.set_base_node_peer(peer);
}
fn | (&mut self, peer: NodeId) {
self.pools = None;
self.base_node_watch.broadcast(Some(peer));
}
async fn setup_base_node_connection(&mut self, peer: NodeId) {
self.pools = None;
loop {
debug!(
target: LOG_TARGET,
"Attempting to connect to base node peer {}...", peer
);
self.set_online_status(OnlineStatus::Connecting);
match self.try_setup_rpc_pool(peer.clone()).await {
Ok(_) => {
self.set_online_status(OnlineStatus::Online);
debug!(
target: LOG_TARGET,
"Wallet is ONLINE and connected to base node {}", peer
);
break;
},
Err(e) => {
self.set_online_status(OnlineStatus::Offline);
error!(target: LOG_TARGET, "{}", e);
time::delay_for(self.config.base_node_monitor_refresh_interval).await;
continue;
},
}
}
}
fn set_online_status(&self, status: OnlineStatus) {
let _ = self.online_status_watch.broadcast(status);
}
async fn try_setup_rpc_pool(&mut self, peer: NodeId) -> Result<(), WalletConnectivityError> {
self.connectivity.add_managed_peers(vec![peer.clone()]).await?;
let conn = self.connectivity.dial_peer(peer).await?;
debug!(
target: LOG_TARGET,
"Successfully established peer connection to base node {}",
conn.peer_node_id()
);
self.pools = Some(ClientPoolContainer {
base_node_sync_rpc_client: conn
.create_rpc_client_pool(self.config.base_node_rpc_pool_size, Default::default()),
base_node_wallet_rpc_client: conn
.create_rpc_client_pool(self.config.base_node_rpc_pool_size, Default::default()),
});
self.notify_pending_requests().await?;
debug!(
target: LOG_TARGET,
"Successfully established RPC connection {}",
conn.peer_node_id()
);
Ok(())
}
async fn notify_pending_requests(&mut self) -> Result<(), WalletConnectivityError> {
let current_pending = mem::take(&mut self.pending_requests);
for reply in current_pending {
if reply.is_canceled() {
continue;
}
self.handle_pool_request(reply).await;
}
Ok(())
}
}
enum ReplyOneshot {
WalletRpc(oneshot::Sender<RpcClientLease<BaseNodeWalletRpcClient>>),
SyncRpc(oneshot::Sender<RpcClientLease<BaseNodeSyncRpcClient>>),
}
impl ReplyOneshot {
pub fn is_canceled(&self) -> bool {
use ReplyOneshot::*;
match self {
WalletRpc(tx) => tx.is_canceled(),
SyncRpc(tx) => tx.is_canceled(),
}
}
}
impl From<oneshot::Sender<RpcClientLease<BaseNodeWalletRpcClient>>> for ReplyOneshot {
fn from(tx: oneshot::Sender<RpcClientLease<BaseNodeWalletRpcClient>>) -> Self {
ReplyOneshot::WalletRpc(tx)
}
}
impl From<oneshot::Sender<RpcClientLease<BaseNodeSyncRpcClient>>> for ReplyOneshot {
fn from(tx: oneshot::Sender<RpcClientLease<BaseNodeSyncRpcClient>>) -> Self {
ReplyOneshot::SyncRpc(tx)
}
}
| set_base_node_peer |
opsrun.py | # RUN: %PYTHON %s 2>&1 | FileCheck %s
import ctypes
import sys
from mlir.ir import *
from mlir.dialects import builtin
from mlir.dialects import linalg
from mlir.dialects import std
from mlir.passmanager import *
from mlir.execution_engine import *
# Log everything to stderr and flush so that we have a unified stream to match
# errors/info emitted by MLIR to stderr.
def log(*args):
print(*args, file=sys.stderr)
sys.stderr.flush()
matmul_boiler = """
func @main() -> f32 attributes {llvm.emit_c_interface} {
%v0 = constant 0.0 : f32
%v1 = constant 1.0 : f32
%v2 = constant 2.0 : f32
%A = memref.alloc() : memref<4x16xf32>
%B = memref.alloc() : memref<16x8xf32>
%C = memref.alloc() : memref<4x8xf32>
linalg.fill(%v1, %A) : f32, memref<4x16xf32>
linalg.fill(%v2, %B) : f32, memref<16x8xf32>
linalg.fill(%v0, %C) : f32, memref<4x8xf32>
call @matmul_on_buffers(%A, %B, %C) :
(memref<4x16xf32>, memref<16x8xf32>, memref<4x8xf32>) -> ()
%c0 = constant 0 : index
%0 = memref.load %C[%c0, %c0] : memref<4x8xf32>
// TODO: FFI-based solution to allow testing and printing with python code.
return %0 : f32
}
"""
fill_boiler = """
func @main() -> i32 attributes {llvm.emit_c_interface} {
%O = memref.alloc() : memref<4x16xi32>
%min = constant -1000.0 : f64
%max = constant 1000.0 : f64
%seed = constant 42 : i32
call @fill_on_buffers(%min, %max, %seed, %O) :
(f64, f64, i32, memref<4x16xi32>) -> ()
%c0 = constant 0 : index
%0 = memref.load %O[%c0, %c0] : memref<4x16xi32>
// TODO: FFI-based solution to allow testing and printing with python code.
return %0 : i32
}
"""
conv_boiler = """
func @main() -> i32 attributes {llvm.emit_c_interface} {
%v0 = constant 0 : i32
%v1 = constant 1.0 : f64
%v2 = constant 2.0 : f64
%input = memref.alloc() : memref<1x4x16x1xf64>
%filter = memref.alloc() : memref<2x2x1xf64>
%output = memref.alloc() : memref<1x2x4x1xi32>
linalg.fill(%v1, %input) : f64, memref<1x4x16x1xf64>
linalg.fill(%v2, %filter) : f64, memref<2x2x1xf64>
linalg.fill(%v0, %output) : i32, memref<1x2x4x1xi32>
call @conv_on_buffers(%input, %filter, %output) :
(memref<1x4x16x1xf64>, memref<2x2x1xf64>, memref<1x2x4x1xi32>) -> ()
%c0 = constant 0 : index
%0 = memref.load %output[%c0, %c0, %c0, %c0] : memref<1x2x4x1xi32>
// TODO: FFI-based solution to allow testing and printing with python code.
return %0 : i32
}
"""
pooling_boiler = """
func @main() -> i32 attributes {llvm.emit_c_interface} {
%v0 = constant 0 : i32
%v42 = constant 42.0 : f64
%v77 = constant 77.0 : f64
%v-13 = constant -13.0 : f64
%v1 = constant 1.0 : f64
%input = memref.alloc() : memref<1x4x16x1xf64>
%shape = memref.alloc() : memref<2x2xf64>
%output = memref.alloc() : memref<1x2x4x1xi32>
linalg.fill(%v1, %input) : f64, memref<1x4x16x1xf64>
linalg.fill(%v1, %shape) : f64, memref<2x2xf64>
linalg.fill(%v0, %output) : i32, memref<1x2x4x1xi32>
%c0 = constant 0 : index
%c1 = constant 1 : index
%c2 = constant 2 : index
memref.store %v42, %input[%c0, %c0, %c0, %c0] : memref<1x4x16x1xf64>
memref.store %v77, %input[%c0, %c0, %c1, %c0] : memref<1x4x16x1xf64>
memref.store %v-13, %input[%c0, %c0, %c2, %c0] : memref<1x4x16x1xf64>
call @pooling_on_buffers(%input, %shape, %output) :
(memref<1x4x16x1xf64>, memref<2x2xf64>, memref<1x2x4x1xi32>) -> ()
%0 = memref.load %output[%c0, %c0, %c0, %c0] : memref<1x2x4x1xi32>
// TODO: FFI-based solution to allow testing and printing with python code.
return %0 : i32
}
"""
def transform(module, boilerplate):
import mlir.conversions
import mlir.dialects.linalg.passes
import mlir.transforms
# TODO: Allow cloning functions from one module to another.
# Atm we have to resort to string concatenation.
mod = Module.parse(
str(module.operation.regions[0].blocks[0].operations[0].operation) +
boilerplate)
pm = PassManager.parse(
"builtin.func(convert-linalg-to-loops, lower-affine, " +
"convert-scf-to-std), convert-vector-to-llvm," +
"convert-memref-to-llvm,convert-std-to-llvm")
pm.run(mod)
return mod
def test_matmul_builtin():
with Context() as ctx, Location.unknown():
module = Module.create()
f32 = F32Type.get()
with InsertionPoint(module.body):
@builtin.FuncOp.from_py_func(
MemRefType.get((4, 16), f32), MemRefType.get((16, 8), f32),
MemRefType.get((4, 8), f32))
def matmul_on_buffers(lhs, rhs, out):
linalg.matmul(lhs, rhs, outs=[out])
execution_engine = ExecutionEngine(transform(module, matmul_boiler))
# TODO: FFI-based solution to allow testing and printing with python code.
# Prepare arguments: one result f32.
# Arguments must be passed as pointers.
c_float_p = ctypes.c_float * 1
res = c_float_p(-1.)
execution_engine.invoke("main", res)
log("RESULT: ", res[0])
# CHECK: RESULT: 32.0
test_matmul_builtin()
def test_matmul_generic():
with Context() as ctx, Location.unknown():
module = Module.create()
f32 = F32Type.get()
with InsertionPoint(module.body):
@builtin.FuncOp.from_py_func(
MemRefType.get((4, 16), f32), MemRefType.get((16, 8), f32),
MemRefType.get((4, 8), f32))
def matmul_on_buffers(lhs, rhs, out):
linalg.matmul(lhs, rhs, outs=[out], emit_generic=True)
execution_engine = ExecutionEngine(transform(module, matmul_boiler))
# TODO: FFI-based solution to allow testing and printing with python code.
# Prepare arguments: one result f32.
# Arguments must be passed as pointers.
c_float_p = ctypes.c_float * 1
res = c_float_p(-1.)
execution_engine.invoke("main", res)
log("RESULT: ", res[0])
# CHECK: RESULT: 32.0
test_matmul_generic()
def test_fill_builtin():
with Context() as ctx, Location.unknown():
module = Module.create()
f64 = F64Type.get()
i32 = IntegerType.get_signless(32)
with InsertionPoint(module.body):
@builtin.FuncOp.from_py_func(f64, f64, i32, MemRefType.get((4, 16), i32))
def fill_on_buffers(min, max, seed, out):
linalg.fill_rng_2d(min, max, seed, outs=[out])
execution_engine = ExecutionEngine(transform(module, fill_boiler))
# TODO: FFI-based solution to allow testing and printing with python code.
# Prepare arguments: one result i32.
# Arguments must be passed as pointers.
c_int_p = ctypes.c_int * 1
res = c_int_p(-1)
execution_engine.invoke("main", res)
log("RESULT: ", res[0])
# CHECK: RESULT: -480
test_fill_builtin()
def test_fill_generic():
with Context() as ctx, Location.unknown():
module = Module.create()
f64 = F64Type.get()
i32 = IntegerType.get_signless(32)
with InsertionPoint(module.body):
@builtin.FuncOp.from_py_func(f64, f64, i32, MemRefType.get((4, 16), i32))
def fill_on_buffers(min, max, seed, out):
linalg.fill_rng_2d(min, max, seed, outs=[out], emit_generic=True)
execution_engine = ExecutionEngine(transform(module, fill_boiler))
# TODO: FFI-based solution to allow testing and printing with python code.
# Prepare arguments: one result i32.
# Arguments must be passed as pointers.
c_int_p = ctypes.c_int * 1
res = c_int_p(-1)
execution_engine.invoke("main", res)
log("RESULT: ", res[0])
# CHECK: RESULT: -480
test_fill_generic()
def test_conv_builtin():
with Context() as ctx, Location.unknown():
module = Module.create()
f64 = F64Type.get()
i32 = IntegerType.get_signless(32)
with InsertionPoint(module.body):
@builtin.FuncOp.from_py_func(
MemRefType.get((1, 4, 16, 1), f64), MemRefType.get((2, 2, 1), f64),
MemRefType.get((1, 2, 4, 1), i32))
def conv_on_buffers(input, filter, output):
linalg.depthwise_conv_2d_input_nhwc_filter_hwc_poly(
input, filter, outs=[output], strides=[2, 4], dilations=[1, 2])
execution_engine = ExecutionEngine(transform(module, conv_boiler))
# TODO: FFI-based solution to allow testing and printing with python code.
# Prepare arguments: one result i32.
# Arguments must be passed as pointers.
c_int_p = ctypes.c_int * 1
res = c_int_p(-1)
execution_engine.invoke("main", res)
log("RESULT: ", res[0])
# CHECK: RESULT: 8
test_conv_builtin()
def test_conv_generic():
with Context() as ctx, Location.unknown():
module = Module.create()
f64 = F64Type.get()
i32 = IntegerType.get_signless(32)
with InsertionPoint(module.body):
@builtin.FuncOp.from_py_func(
MemRefType.get((1, 4, 16, 1), f64), MemRefType.get((2, 2, 1), f64),
MemRefType.get((1, 2, 4, 1), i32))
def conv_on_buffers(input, filter, output):
linalg.depthwise_conv_2d_input_nhwc_filter_hwc_poly(
input,
filter,
outs=[output],
strides=[2, 4],
dilations=[1, 2],
emit_generic=True)
execution_engine = ExecutionEngine(transform(module, conv_boiler))
# TODO: FFI-based solution to allow testing and printing with python code.
# Prepare arguments: one result i32.
# Arguments must be passed as pointers.
c_int_p = ctypes.c_int * 1
res = c_int_p(-1)
execution_engine.invoke("main", res)
log("RESULT: ", res[0])
# CHECK: RESULT: 8
test_conv_generic()
def test_max_pooling_builtin():
|
test_max_pooling_builtin()
def test_max_pooling_generic():
with Context() as ctx, Location.unknown():
module = Module.create()
f64 = F64Type.get()
i32 = IntegerType.get_signless(32)
with InsertionPoint(module.body):
@builtin.FuncOp.from_py_func(
MemRefType.get((1, 4, 16, 1), f64), MemRefType.get((2, 2), f64),
MemRefType.get((1, 2, 4, 1), i32))
def pooling_on_buffers(input, shape, output):
linalg.pooling_nhwc_max(
input,
shape,
outs=[output],
strides=[2, 4],
dilations=[1, 2],
emit_generic=True)
execution_engine = ExecutionEngine(transform(module, pooling_boiler))
# TODO: FFI-based solution to allow testing and printing with python code.
# Prepare arguments: one result i32.
# Arguments must be passed as pointers.
c_int_p = ctypes.c_int * 1
res = c_int_p(-1)
execution_engine.invoke("main", res)
log("RESULT: ", res[0])
# 77 is not selected due to the dilation 2 in the second dimension.
# CHECK: RESULT: 42
test_max_pooling_generic()
def test_min_pooling_builtin():
with Context() as ctx, Location.unknown():
module = Module.create()
f64 = F64Type.get()
i32 = IntegerType.get_signless(32)
with InsertionPoint(module.body):
@builtin.FuncOp.from_py_func(
MemRefType.get((1, 4, 16, 1), f64), MemRefType.get((2, 2), f64),
MemRefType.get((1, 2, 4, 1), i32))
def pooling_on_buffers(input, shape, output):
linalg.pooling_nhwc_min(
input, shape, outs=[output], strides=[2, 4], dilations=[1, 2])
execution_engine = ExecutionEngine(transform(module, pooling_boiler))
# TODO: FFI-based solution to allow testing and printing with python code.
# Prepare arguments: one result i32.
# Arguments must be passed as pointers.
c_int_p = ctypes.c_int * 1
res = c_int_p(-1)
execution_engine.invoke("main", res)
log("RESULT: ", res[0])
# CHECK: RESULT: -13
test_min_pooling_builtin()
def test_min_pooling_generic():
with Context() as ctx, Location.unknown():
module = Module.create()
f64 = F64Type.get()
i32 = IntegerType.get_signless(32)
with InsertionPoint(module.body):
@builtin.FuncOp.from_py_func(
MemRefType.get((1, 4, 16, 1), f64), MemRefType.get((2, 2), f64),
MemRefType.get((1, 2, 4, 1), i32))
def pooling_on_buffers(input, shape, output):
linalg.pooling_nhwc_min(
input,
shape,
outs=[output],
strides=[2, 4],
dilations=[1, 2],
emit_generic=True)
execution_engine = ExecutionEngine(transform(module, pooling_boiler))
# TODO: FFI-based solution to allow testing and printing with python code.
# Prepare arguments: one result i32.
# Arguments must be passed as pointers.
c_int_p = ctypes.c_int * 1
res = c_int_p(-1)
execution_engine.invoke("main", res)
log("RESULT: ", res[0])
# CHECK: RESULT: -13
test_min_pooling_generic()
| with Context() as ctx, Location.unknown():
module = Module.create()
f64 = F64Type.get()
i32 = IntegerType.get_signless(32)
with InsertionPoint(module.body):
@builtin.FuncOp.from_py_func(
MemRefType.get((1, 4, 16, 1), f64), MemRefType.get((2, 2), f64),
MemRefType.get((1, 2, 4, 1), i32))
def pooling_on_buffers(input, shape, output):
linalg.pooling_nhwc_max(
input, shape, outs=[output], strides=[2, 4], dilations=[1, 2])
execution_engine = ExecutionEngine(transform(module, pooling_boiler))
# TODO: FFI-based solution to allow testing and printing with python code.
# Prepare arguments: one result i32.
# Arguments must be passed as pointers.
c_int_p = ctypes.c_int * 1
res = c_int_p(-1)
execution_engine.invoke("main", res)
log("RESULT: ", res[0])
# 77 is not selected due to the dilation 2 in the second dimension.
# CHECK: RESULT: 42 |
test_user_location.py | """
Aqualink API documentation
The Aqualink public API documentation # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import aqualink_sdk
from aqualink_sdk.model.user_location import UserLocation
class | (unittest.TestCase):
"""UserLocation unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testUserLocation(self):
"""Test UserLocation"""
# FIXME: construct object with mandatory attributes with example values
# model = UserLocation() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| TestUserLocation |
icon.macro.js | /* eslint-disable */
import { createMacro } from 'babel-plugin-macros';
import icons from '@ibmduo/icons/ibm-icons.json';
import { iconTemplate } from './templates';
import {
converge,
identity,
propEq,
pipe,
find,
__,
filter,
join,
map,
lensPath,
view,
defaultTo,
prop,
} from 'ramda';
/**
* @template A
*/
class OptionBase {
/**
* @template B
* @param {function(A): B} f
* @returns {OptionBase<B>}
*/
map(f) {}
/**
* @template B
* @param {function(A): B} f
* @returns {B}
*/
flatMap(f) {}
/**
* call ifSome - when instanceof Some
* call ifNone - when None
* @param ifNone
* @param ifSome
*/
fold(ifNone, ifSome) {}
}
const None = (() => {
class None extends OptionBase {
map = _f => {
return this;
};
flatMap = _f => {
return this;
};
fold = (ifNone, ifSome) => {
return ifNone();
};
}
return new None();
})();
/**
* @template A
*/
class | extends OptionBase {
/**
* @private x
*/
#x;
/**
* @param {A} x
*/
constructor(x) {
super();
this.#x = x;
}
map = f => {
return Option(f(this.#x));
};
flatMap = f => {
return f(this.#x);
};
fold = (ifNone, ifSome) => {
return ifSome(this.#x);
};
}
/**
* Option type factory. Creates Option type variants (Some(x)|None) for given x
* @param {A} x
* @returns {Option.<A>}
*/
const Option = x => (x != null ? new Some(x) : None);
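// Usage sketch (added for illustration; not part of the original macro):
//   Option('add').map(s => s.toUpperCase()).fold(() => 'none', v => v)  // -> 'ADD'
//   Option(null).fold(() => 'none', v => v)                             // -> 'none'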
/**
* creates instances of Some
* @param x
* @returns {Some}
*/
const some = x => new Some(x);
/**
* get a None (None is a singleton)
* @param _x
* @returns {None}
*/
const none = _x => None;
/**
* @template A
* @typedef {None|Some.<A>} Option
*/
const namespace = 'security--';
const getComponentNamespace = componentName => `${namespace}${componentName}`;
const getReactIcon = (() => {
const iconsObject = require('@ibmduo/icons-react');
const icons = Object.keys(iconsObject).reduce((icons, name) => {
const plainName = name.replace(/\d{2}$/, '');
const size = name.replace(plainName, '');
if (!icons.has(plainName)) {
icons.set(
plainName,
importName =>
`import ${importName} from "@ibmduo/icons-react/lib/${plainName}/16";`
);
}
icons.set(
name,
importName =>
`import ${importName} from "@ibmduo/icons-react/lib/${plainName}/${size}";`
);
return icons;
}, new Map());
return name => {
if (!icons.has(name)) {
return none();
}
return some(icons.get(name));
};
})();
const getIcon = (icons => {
const iconTable = Object.entries(icons).map(([indexName, iconObj]) => ({
...iconObj,
indexName,
}));
const some = predicateFn => (...args) => args.some(predicateFn);
const selector = converge(some(identity), [
propEq('name'),
propEq('indexName'),
propEq('id'),
]);
/**
* @sig findIcon :: IconObject ico, Option opt => string -> opt ico
*/
return pipe(
selector,
find(__, iconTable),
Option
);
})(icons);
/**
* interpolates the icon template
* @param {string} pathElem
* @param {string} name
* @returns {string}
*/
const template = (pathElem, name) =>
iconTemplate({
$NAME: name,
$PATH: pathElem,
$DEFAULT_TO_NAME: 'DEFAULT_TO',
$ICON_NAMESPACE: getComponentNamespace('icon'),
});
const removeEmpty = filter(Boolean);
const joinBySpace = join(' ');
const transformPaths = map(prop('d'));
const svgPathLens = lensPath(['svgData', 'paths']);
const reducePaths = pipe(
transformPaths,
removeEmpty,
joinBySpace
);
/**
* gets all imports and require statements in a file (just at the top level)
* -> needed to find the right position for inserting the generated icon components
* @param file
* @returns {*}
*/
const getImports = file =>
file.path.get('body').filter(path => {
if (path.type === 'VariableDeclaration') {
return path
.get('declarations')
.some(
declarationPath =>
declarationPath.get('init').isCallExpression() &&
declarationPath.get('init.callee').node.name === 'require'
);
}
return path.type === 'ImportDeclaration';
});
/**
* inserts the generated Icon components into the file's AST
* - just below the import and require declarations
* @param ast
* @param file
*/
const insert = (ast, file) => {
const imports = getImports(file);
imports[imports.length - 1].insertAfter(ast);
};
/**
* renames the usages of the icon elements to their new name
* @param openingElement
* @param closingElement
* @param name
*/
const renameIconElements = (openingElement = {}, closingElement = {}, name) => {
if (!name) {
throw new TypeError('name must be specified');
}
if (openingElement.node && openingElement.node.name) {
openingElement.node.name.name = name;
}
if (closingElement.node && closingElement.node.name) {
closingElement.node.name.name = name;
}
};
/**
* Compiles to an Icon component.
*
*/
module.exports = createMacro(({ references, state, babel }) => {
const { default: defaultImport = [] } = references;
// generate a unique name for the helper function
const defaultToIdentifier = state.file.scope.generateUidIdentifier(
'defaultTo'
);
/**
* defaultTo helper function for generated icon components
*/
const defaultToTemplate = `function DEFAULT_TO(defaultVal) {
return function(value) {
if (value == null || Number.isNaN(value)) {
return defaultVal;
}
return value;
};
}`;
const rawSourceTemplates = [];
const imports = [];
// go through all useages of the default import
defaultImport.forEach(path => {
const { parentPath } = path;
const _path = path;
// throw if macro is not used as JSX element
if (parentPath.type !== 'JSXOpeningElement') {
throw path.hub.file.buildCodeFrameError(
path.node,
'This macro must be used as JSX element.'
);
}
// read element attributes
if (parentPath.type === 'JSXOpeningElement') {
const propsByType = parentPath.parentPath
.get('openingElement.attributes')
.reduce(
(propsByType, attributePath) => {
switch (attributePath.type) {
case 'JSXAttribute': {
propsByType.props[
attributePath.get('name').node.name
] = attributePath.get('value');
break;
}
case 'JSXSpreadAttribute': {
propsByType.spread[
attributePath.get('argument').node.name
] = attributePath.get('argument');
break;
}
default: {
break;
}
}
return propsByType;
},
{ props: {}, spread: {} }
);
let { path, name = '', iconName } = propsByType.props;
// takes a path and statically evaluates its value
const evaluate = path => {
if (path) {
if (path.isStringLiteral()) {
return path.evaluate().value;
}
if (path.isJSXExpressionContainer()) {
return path.get('expression').evaluate().value;
}
}
};
const componentName = parentPath.hub.file.scope.generateUid(
evaluate(name) || evaluate(iconName) || 'Icon'
);
const component = do {
// creates a unique name for the generated component
if (!path) {
name = evaluate(name) || evaluate(iconName);
// finds the icon and concatenates its path data
const maybeIcon = getIcon(name).map(
pipe(
view(svgPathLens),
defaultTo([]),
reducePaths
)
);
maybeIcon.fold(
// we did not find a path. So we will take a look, if we find the icon in the react icons package
() =>
getReactIcon(name).fold(
// we did not find an icon nor a path.
// We pass null as path and let the defaultTo handler handle that case.
() => template(`null`, componentName),
// interpolate the template with the unique name
iconTemplate => ({
type: 'import',
template: iconTemplate(componentName),
})
),
// we found a path. So we insert it's string representation to the generated component
path => {
const pathElem = `'${path}'`;
return {
type: 'component',
template: template(pathElem, componentName),
};
}
);
} else {
({
type: 'component',
template: template(`null`, componentName),
});
}
};
// add the template to the sources that we are going to insert
switch (component.type) {
case 'import': {
imports.push(component.template);
break;
}
default: {
rawSourceTemplates.push(component.template);
}
}
renameIconElements(
_path.parentPath.parentPath.get('openingElement'),
_path.parentPath.parentPath.get('closingElement'),
componentName
);
}
});
// generate an AST from concatenated source templates
// TODO: dedupe generated icon components
if (imports.length) {
const importsAst = babel.template(imports.join('\n'))();
state.file.path.get('body')[0].insertBefore(importsAst);
}
if (rawSourceTemplates.length) {
const ast = babel.template.statements(
defaultToTemplate + '\n' + rawSourceTemplates.join('\n'),
{
plugins: ['jsx', 'objectRestSpread'],
preserveComments: true,
}
)({ DEFAULT_TO: defaultToIdentifier.name });
//console.log(babel.generate(ast));
insert(ast, state.file);
}
});
| Some |
offensive.py | from daily_fantasy_sports_scoring_calculators.core.calculators.scoring import StatisticalCategoryPointsCalculator, \
StatisticalValueCalculator
from daily_fantasy_sports_scoring_calculators.draft_kings.nfl.scoring.calculators.value_to_points.offensive import \
PassingTouchdownsCalculator as PassingTouchdownsPointsCalculator, \
HasAchievedAtLeast300YardsCalculator as HasAchievedAtLeast300PassingYardsPointsCalculator, \
PassingYardageCalculator as PassingYardagePointsCalculator, \
HasAchievedAtLeast100YardsCalculator as HasAchievedAtLeast100YardsPointsCalculator, \
NonPassingTouchdownsCalculator as NonPassingTouchdownsPointsCalculator, \
NonPassingYardsCalculator as NonPassingYardsPointsCalculator, \
TurnoversCalculator as TurnoversPointsCalculator, \
TwoPointConversionsCalculator as TwoPointConversionsPointsCalculator, \
ReceptionsCalculator as ReceptionsPointsCalculator
from daily_fantasy_sports_scoring_calculators.draft_kings.nfl.statistics.calculators.offensive import \
PassingTouchdownsCalculator as PassingTouchdownsValueCalculator, \
HasAchievedMinimumYardageRequirementCalculator as HasAchievedMinimumYardageRequirementValueCalculator, \
InterceptionsCalculator as InterceptionsValueCalculator, \
RushingTouchdownsCalculator as RushingTouchdownsValueCalculator, \
RushingYardageCalculator as RushingYardageValueCalculator, \
ReceivingTouchdownsCalculator as ReceivingTouchdownsValueCalculator, \
ReceptionsCalculator as ReceptionsValueCalculator, \
KickoffsReturnTouchdownsCalculator as KickoffsReturnTouchdownsValueCalculator, \
PuntReturnTouchdownsCalculator as PuntReturnTouchdownsValueCalculator, \
FieldGoalReturnTouchdownsCalculator as FieldGoalReturnTouchdownsValueCalculator, \
FumblesLostCalculator as FumblesLostValueCalculator, \
TwoPointConversionsCaughtCalculator as TwoPointConversionsCaughtValueCalculator, \
TwoPointConversionsRushedCalculator as TwoPointConversionsRushedValueCalculator, \
TwoPointConversionsThrownCalculator as TwoPointConversionsThrownValueCalculator, \
FumbleRecoveryTouchdownsCalculator as FumbleRecoveryTouchdownsValueCalculator, \
ReceivingYardageCalculator as ReceivingYardageValueCalculator, \
PassingYardageCalculator as PassingYardageValueCalculator
passing_yardage_value_calculator = PassingYardageValueCalculator()
receiving_yardage_value_calculator = ReceivingYardageValueCalculator()
rushing_yardage_value_calculator = RushingYardageValueCalculator()
non_passing_yards_points_calculator = NonPassingYardsPointsCalculator()
class PassingTouchdownsCalculator(StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(
PassingTouchdownsValueCalculator(),
PassingTouchdownsPointsCalculator())
class NonPassingTouchdownsCalculator(StatisticalCategoryPointsCalculator):
def __init__(self, value_calculator: StatisticalValueCalculator):
super().__init__(value_calculator, NonPassingTouchdownsPointsCalculator())
class HasAchievedAtLeast300PassingYardsCalculator(StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(
HasAchievedMinimumYardageRequirementValueCalculator(
yardage_value_calculator=passing_yardage_value_calculator,
minimum_inclusive_required_yardage=300
),
HasAchievedAtLeast300PassingYardsPointsCalculator()
)
class PassingYardageCalculator(StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(
passing_yardage_value_calculator,
PassingYardagePointsCalculator())
class TurnoversCalculator(StatisticalCategoryPointsCalculator):
def __init__(self, value_calculator: StatisticalValueCalculator):
super().__init__(value_calculator, TurnoversPointsCalculator())
def __eq__(self, o: object) -> bool:
if isinstance(o, TurnoversCalculator):
return o.value_calculator == self.value_calculator and super().__eq__(o)
return False
def __hash__(self):
return hash((self.value_calculator, super().__hash__()))
class InterceptionsCalculator(TurnoversCalculator):
def __init__(self):
super().__init__(InterceptionsValueCalculator())
class RushingTouchdownsCalculator(NonPassingTouchdownsCalculator):
def __init__(self):
super().__init__(RushingTouchdownsValueCalculator())
class | (StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(
rushing_yardage_value_calculator,
non_passing_yards_points_calculator)
class HasReached100YardsRushingPointsLimit(
StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(
HasAchievedMinimumYardageRequirementValueCalculator(
yardage_value_calculator=rushing_yardage_value_calculator,
minimum_inclusive_required_yardage=100
),
HasAchievedAtLeast100YardsPointsCalculator()
)
class ReceivingTouchdownsCalculator(NonPassingTouchdownsCalculator):
def __init__(self):
super().__init__(ReceivingTouchdownsValueCalculator())
class ReceivingYardsCalculator(StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(
receiving_yardage_value_calculator,
non_passing_yards_points_calculator)
class HasReached100YardsReceivingCalculator(StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(
value_calculator=HasAchievedMinimumYardageRequirementValueCalculator(
yardage_value_calculator=receiving_yardage_value_calculator,
minimum_inclusive_required_yardage=100),
points_calculator=HasAchievedAtLeast100YardsPointsCalculator())
class ReceptionsCalculator(StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(ReceptionsValueCalculator(), ReceptionsPointsCalculator())
class PuntReturnTouchdownsCalculator(NonPassingTouchdownsCalculator):
def __init__(self):
super().__init__(PuntReturnTouchdownsValueCalculator())
class KickReturnTouchdownsCalculator(NonPassingTouchdownsCalculator):
def __init__(self):
super().__init__(KickoffsReturnTouchdownsValueCalculator())
class FieldGoalReturnTouchdownsCalculator(NonPassingTouchdownsCalculator):
def __init__(self):
super().__init__(FieldGoalReturnTouchdownsValueCalculator())
class FumblesLostCalculator(TurnoversCalculator):
def __init__(self):
super().__init__(FumblesLostValueCalculator())
class TwoPointConversionCalculator(StatisticalCategoryPointsCalculator):
def __init__(self, value_calculator: StatisticalValueCalculator):
super().__init__(value_calculator, TwoPointConversionsPointsCalculator())
def __eq__(self, o: object) -> bool:
if isinstance(o, TwoPointConversionCalculator):
return o.value_calculator == self.value_calculator and super().__eq__(o)
return False
def __hash__(self):
return hash((self.value_calculator, super().__hash__()))
class TwoPointConversionsThrownCalculator(TwoPointConversionCalculator):
def __init__(self):
super().__init__(TwoPointConversionsThrownValueCalculator())
class TwoPointConversionsCaughtCalculator(TwoPointConversionCalculator):
def __init__(self):
super().__init__(TwoPointConversionsCaughtValueCalculator())
class TwoPointConversionsRushedCalculator(TwoPointConversionCalculator):
def __init__(self):
super().__init__(TwoPointConversionsRushedValueCalculator())
class FumbleRecoveryTouchdownsCalculator(NonPassingTouchdownsCalculator):
def __init__(self):
super().__init__(FumbleRecoveryTouchdownsValueCalculator())
| RushingYardageCalculator |
pgadmin.go | package cmd
/*
Copyright 2020 Crunchy Data Solutions, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import (
"fmt"
"os"
"strings"
"github.com/crunchydata/postgres-operator/cmd/pgo/api"
"github.com/crunchydata/postgres-operator/cmd/pgo/util"
msgs "github.com/crunchydata/postgres-operator/pkg/apiservermsgs"
)
// showPgAdminTextPadding contains the values for what the text padding should be
type showPgAdminTextPadding struct {
ClusterName int
ClusterIP int
ExternalIP int
ServiceName int
}
// updatePgAdminTextPadding contains the values for what the text padding should be
type updatePgAdminTextPadding struct {
ClusterName int
ErrorMessage int
Status int
}
func createPgAdmin(args []string, ns string) {
if Selector == "" && len(args) == 0 {
fmt.Println("Error: The --selector flag is required when cluster is unspecified.")
os.Exit(1)
}
request := msgs.CreatePgAdminRequest{
Args: args,
ClientVersion: msgs.PGO_VERSION,
Namespace: ns,
Selector: Selector,
}
response, err := api.CreatePgAdmin(httpclient, &SessionCredentials, &request)
if err != nil {
fmt.Println("Error: " + err.Error())
os.Exit(1)
}
// this is slightly rewritten from the legacy method
if response.Status.Code != msgs.Ok {
fmt.Println("Error: " + response.Status.Msg)
for _, v := range response.Results {
fmt.Println(v)
}
os.Exit(1)
}
for _, v := range response.Results {
fmt.Println(v)
}
}
func deletePgAdmin(args []string, ns string) {
if Selector == "" && len(args) == 0 |
// set up the API request
request := msgs.DeletePgAdminRequest{
Args: args,
ClientVersion: msgs.PGO_VERSION,
Selector: Selector,
Namespace: ns,
}
response, err := api.DeletePgAdmin(httpclient, &SessionCredentials, &request)
if err != nil {
fmt.Println("Error: " + err.Error())
os.Exit(1)
}
if response.Status.Code == msgs.Ok {
for _, v := range response.Results {
fmt.Println(v)
}
} else {
fmt.Println("Error: " + response.Status.Msg)
os.Exit(1)
}
}
// makeShowPgAdminInterface returns an interface slice of the available values
// in show pgadmin
func makeShowPgAdminInterface(values []msgs.ShowPgAdminDetail) []interface{} {
// iterate through the list of values to make the interface
showPgAdminInterface := make([]interface{}, len(values))
for i, value := range values {
showPgAdminInterface[i] = value
}
return showPgAdminInterface
}
// printShowPgAdminText prints out the information around each PostgreSQL
// cluster's pgAdmin
// printShowPgAdminText renders a text response
func printShowPgAdminText(response msgs.ShowPgAdminResponse) {
// if the request errored, return the message here and exit with an error
if response.Status.Code != msgs.Ok {
fmt.Println("Error: " + response.Status.Msg)
os.Exit(1)
}
// if no results returned, return an error
if len(response.Results) == 0 {
fmt.Println("Nothing found.")
return
}
// make the interface for the pgadmin clusters
showPgAdminInterface := makeShowPgAdminInterface(response.Results)
// format the header
// start by setting up the different text paddings
padding := showPgAdminTextPadding{
ClusterName: getMaxLength(showPgAdminInterface, headingCluster, "ClusterName"),
ClusterIP: getMaxLength(showPgAdminInterface, headingClusterIP, "ServiceClusterIP"),
ExternalIP: getMaxLength(showPgAdminInterface, headingExternalIP, "ServiceExternalIP"),
ServiceName: getMaxLength(showPgAdminInterface, headingService, "ServiceName"),
}
printShowPgAdminTextHeader(padding)
// iterate through the results and print them out
for _, result := range response.Results {
printShowPgAdminTextRow(result, padding)
}
}
// printShowPgAdminTextHeader prints out the header
func printShowPgAdminTextHeader(padding showPgAdminTextPadding) {
// print the header
fmt.Println("")
fmt.Printf("%s", util.Rpad(headingCluster, " ", padding.ClusterName))
fmt.Printf("%s", util.Rpad(headingService, " ", padding.ServiceName))
fmt.Printf("%s", util.Rpad(headingClusterIP, " ", padding.ClusterIP))
fmt.Printf("%s", util.Rpad(headingExternalIP, " ", padding.ExternalIP))
fmt.Println("")
// print the layer below the header...which prints out a bunch of "-" that's
// 1 less than the padding value
fmt.Println(
strings.Repeat("-", padding.ClusterName-1),
strings.Repeat("-", padding.ServiceName-1),
strings.Repeat("-", padding.ClusterIP-1),
strings.Repeat("-", padding.ExternalIP-1),
)
}
// printShowPgAdminTextRow prints a row of the text data
func printShowPgAdminTextRow(result msgs.ShowPgAdminDetail, padding showPgAdminTextPadding) {
fmt.Printf("%s", util.Rpad(result.ClusterName, " ", padding.ClusterName))
fmt.Printf("%s", util.Rpad(result.ServiceName, " ", padding.ServiceName))
fmt.Printf("%s", util.Rpad(result.ServiceClusterIP, " ", padding.ClusterIP))
fmt.Printf("%s", util.Rpad(result.ServiceExternalIP, " ", padding.ExternalIP))
fmt.Println("")
}
// showPgAdmin prepares to make an API requests to display information about
// one or more pgAdmin deployments. "clusterNames" is an array of cluster
// names to iterate over
func showPgAdmin(namespace string, clusterNames []string) {
// first, determine if any arguments have been pass in
if len(clusterNames) == 0 && Selector == "" {
fmt.Println("Error: You must provide at least one cluster name, or use a selector with the `--selector` flag")
os.Exit(1)
}
request := msgs.ShowPgAdminRequest{
ClusterNames: clusterNames,
Namespace: namespace,
Selector: Selector,
}
response, err := api.ShowPgAdmin(httpclient, &SessionCredentials, request)
if err != nil {
fmt.Println("Error:", err.Error())
os.Exit(1)
}
// great! now we can work on interpreting the results and outputting them
// per the user's desired output format
// render the next bit based on the output type
switch OutputFormat {
case "json":
fmt.Println("outputting in json")
printJSON(response)
default:
fmt.Println("outputting text")
printShowPgAdminText(response)
}
}
| {
fmt.Println("Error: The --selector flag or a cluster name is required.")
os.Exit(1)
} |
generate_universe_test.py | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for generate_universe.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from os import path
from absl.testing import absltest
from tests import test_constants
from validate import generate_universe
_DEFAULT_ONTOLOGY_LOCATION = test_constants.ONTOLOGY_ROOT
_BAD_MODIFIED_ONTOLOGY = path.join(test_constants.TEST_RESOURCES, 'BAD',
'BAD_FORMAT')
_NONEXISTENT_LOCATION = path.join(test_constants.TEST_ROOT, 'nonexistent')
_EMPTY_FOLDER = path.join(test_constants.TEST_RESOURCES, 'BAD', 'BAD_EMPTY')
class | (absltest.TestCase):
def testCanGenerateUniverse(self):
universe = generate_universe.BuildUniverse(_DEFAULT_ONTOLOGY_LOCATION)
self.assertTrue(universe)
def testCatchInvalidModifiedOntology(self):
with self.assertRaises(Exception) as context:
generate_universe.BuildUniverse(_BAD_MODIFIED_ONTOLOGY)
self.assertIn('no longer valid', str(context.exception))
def testModifiedTypesCatchesNonexistent(self):
self.assertRaises(Exception,
generate_universe.BuildUniverse(_NONEXISTENT_LOCATION))
def testModifiedTypesCatchesEmpty(self):
self.assertRaises(Exception, generate_universe.BuildUniverse(_EMPTY_FOLDER))
if __name__ == '__main__':
absltest.main()
| GenerateUniverseTest |
test_bootstrap.py | """Test the bootstrapping."""
# pylint: disable=protected-access
import asyncio
import logging
import os
from unittest.mock import Mock, patch
from homeassistant import bootstrap
import homeassistant.config as config_util
import homeassistant.util.dt as dt_util
from tests.common import (
MockModule,
get_test_config_dir,
mock_coro,
mock_integration, | VERSION_PATH = os.path.join(get_test_config_dir(), config_util.VERSION_FILE)
_LOGGER = logging.getLogger(__name__)
# prevent .HA_VERSION file from being written
@patch("homeassistant.bootstrap.conf_util.process_ha_config_upgrade", Mock())
@patch(
"homeassistant.util.location.async_detect_location_info",
Mock(return_value=mock_coro(None)),
)
@patch("os.path.isfile", Mock(return_value=True))
@patch("os.access", Mock(return_value=True))
@patch("homeassistant.bootstrap.async_enable_logging", Mock(return_value=True))
def test_from_config_file(hass):
"""Test with configuration file."""
components = set(["browser", "conversation", "script"])
files = {"config.yaml": "".join("{}:\n".format(comp) for comp in components)}
with patch_yaml_files(files, True):
yield from bootstrap.async_from_config_file("config.yaml", hass)
assert components == hass.config.components
@patch("homeassistant.bootstrap.async_enable_logging", Mock())
@asyncio.coroutine
def test_home_assistant_core_config_validation(hass):
"""Test if we pass in wrong information for HA conf."""
# Extensive HA conf validation testing is done
result = yield from bootstrap.async_from_config_dict(
{"homeassistant": {"latitude": "some string"}}, hass
)
assert result is None
async def test_async_from_config_file_not_mount_deps_folder(loop):
"""Test that we not mount the deps folder inside async_from_config_file."""
hass = Mock(async_add_executor_job=Mock(side_effect=lambda *args: mock_coro()))
with patch("homeassistant.bootstrap.is_virtual_env", return_value=False), patch(
"homeassistant.bootstrap.async_enable_logging", return_value=mock_coro()
), patch(
"homeassistant.bootstrap.async_mount_local_lib_path", return_value=mock_coro()
) as mock_mount, patch(
"homeassistant.bootstrap.async_from_config_dict", return_value=mock_coro()
):
await bootstrap.async_from_config_file("mock-path", hass)
assert len(mock_mount.mock_calls) == 1
with patch("homeassistant.bootstrap.is_virtual_env", return_value=True), patch(
"homeassistant.bootstrap.async_enable_logging", return_value=mock_coro()
), patch(
"homeassistant.bootstrap.async_mount_local_lib_path", return_value=mock_coro()
) as mock_mount, patch(
"homeassistant.bootstrap.async_from_config_dict", return_value=mock_coro()
):
await bootstrap.async_from_config_file("mock-path", hass)
assert len(mock_mount.mock_calls) == 0
async def test_load_hassio(hass):
"""Test that we load Hass.io component."""
with patch.dict(os.environ, {}, clear=True):
assert bootstrap._get_domains(hass, {}) == set()
with patch.dict(os.environ, {"HASSIO": "1"}):
assert bootstrap._get_domains(hass, {}) == {"hassio"}
async def test_empty_setup(hass):
"""Test an empty set up loads the core."""
await bootstrap._async_set_up_integrations(hass, {})
for domain in bootstrap.CORE_INTEGRATIONS:
assert domain in hass.config.components, domain
async def test_core_failure_aborts(hass, caplog):
"""Test failing core setup aborts further setup."""
with patch(
"homeassistant.components.homeassistant.async_setup",
return_value=mock_coro(False),
):
await bootstrap._async_set_up_integrations(hass, {"group": {}})
assert "core failed to initialize" in caplog.text
# We aborted early, group not set up
assert "group" not in hass.config.components
async def test_setting_up_config(hass, caplog):
"""Test we set up domains in config."""
await bootstrap._async_set_up_integrations(
hass, {"group hello": {}, "homeassistant": {}}
)
assert "group" in hass.config.components
async def test_setup_after_deps_all_present(hass, caplog):
"""Test after_dependencies when all present."""
caplog.set_level(logging.DEBUG)
order = []
def gen_domain_setup(domain):
async def async_setup(hass, config):
order.append(domain)
return True
return async_setup
mock_integration(
hass, MockModule(domain="root", async_setup=gen_domain_setup("root"))
)
mock_integration(
hass,
MockModule(
domain="first_dep",
async_setup=gen_domain_setup("first_dep"),
partial_manifest={"after_dependencies": ["root"]},
),
)
mock_integration(
hass,
MockModule(
domain="second_dep",
async_setup=gen_domain_setup("second_dep"),
partial_manifest={"after_dependencies": ["first_dep"]},
),
)
await bootstrap._async_set_up_integrations(
hass, {"root": {}, "first_dep": {}, "second_dep": {}}
)
assert "root" in hass.config.components
assert "first_dep" in hass.config.components
assert "second_dep" in hass.config.components
assert order == ["root", "first_dep", "second_dep"]
async def test_setup_after_deps_not_trigger_load(hass, caplog):
"""Test after_dependencies does not trigger loading it."""
caplog.set_level(logging.DEBUG)
order = []
def gen_domain_setup(domain):
async def async_setup(hass, config):
order.append(domain)
return True
return async_setup
mock_integration(
hass, MockModule(domain="root", async_setup=gen_domain_setup("root"))
)
mock_integration(
hass,
MockModule(
domain="first_dep",
async_setup=gen_domain_setup("first_dep"),
partial_manifest={"after_dependencies": ["root"]},
),
)
mock_integration(
hass,
MockModule(
domain="second_dep",
async_setup=gen_domain_setup("second_dep"),
partial_manifest={"after_dependencies": ["first_dep"]},
),
)
await bootstrap._async_set_up_integrations(hass, {"root": {}, "second_dep": {}})
assert "root" in hass.config.components
assert "first_dep" not in hass.config.components
assert "second_dep" in hass.config.components
assert order == ["root", "second_dep"]
async def test_setup_after_deps_not_present(hass, caplog):
"""Test after_dependencies when referenced integration doesn't exist."""
caplog.set_level(logging.DEBUG)
order = []
def gen_domain_setup(domain):
async def async_setup(hass, config):
order.append(domain)
return True
return async_setup
mock_integration(
hass, MockModule(domain="root", async_setup=gen_domain_setup("root"))
)
mock_integration(
hass,
MockModule(
domain="second_dep",
async_setup=gen_domain_setup("second_dep"),
partial_manifest={"after_dependencies": ["first_dep"]},
),
)
await bootstrap._async_set_up_integrations(
hass, {"root": {}, "first_dep": {}, "second_dep": {}}
)
assert "root" in hass.config.components
assert "first_dep" not in hass.config.components
assert "second_dep" in hass.config.components
assert order == ["root", "second_dep"] | patch_yaml_files,
)
ORIG_TIMEZONE = dt_util.DEFAULT_TIME_ZONE |
game-report.js | /**
* @fileOverview Generates the game report on players, win rates, scores,
* KataGo choices, good moves, bad moves, and bad hot spots.
*/
class | {
constructor(gametree) {
this.root = gametree.root;
this.goodmovewinrate = gametree.opts.maxWinrateDropForGoodMove / 100;
this.badmovewinrate = gametree.opts.minWinrateDropForBadMove / 100;
this.badhotspotwinrate = gametree.opts.minWinrateDropForBadHotSpot / 100;
this.visits = gametree.maxVisits;
const drops = gametree.nodes.map((node, index) => ({
index,
pl: node.pl,
winrateDrop: node.winrateDrop,
scoreDrop: node.scoreDrop,
choice: node.choice,
}));
this.goodBads = {
B: makeGoodBads('B', drops, this),
W: makeGoodBads('W', drops, this),
};
}
// Generates game report.
reportGame() {
if (this.report) return this.report;
const ofRoot = (key) => this.root[key] && this.root[key][0].trim();
const evkmredt = [
ofRoot('EV') || ofRoot('GN'),
ofRoot('KM') ? `Komi ${ofRoot('KM')}` : '',
ofRoot('RE'),
ofRoot('DT'),
]
.filter((v) => v)
.join(', ');
const title = evkmredt ? `\n${evkmredt}\n` : '';
const pb = plColor(ofRoot('PB'), 'Black');
const pw = plColor(ofRoot('PW'), 'White');
const reportPlayer = (goodBads, that) =>
reportGoodAndBads(
goodBads,
that.goodmovewinrate,
that.badmovewinrate,
that.badhotspotwinrate,
);
this.report =
`# Analyze-SGF Report\n${title}` +
`\n${pb}\n${reportPlayer(this.goodBads.B, this)}` +
`\n${pw}\n${reportPlayer(this.goodBads.W, this)}` +
`\nAnalyzed by KataGo Parallel Analysis Engine ` +
`(${this.visits} max visits).`;
return this.report;
}
// Generates 'Bad moves left' report.
reportBadsLeft(turnNumber) {
const report =
getBadsLeft('B', this.goodBads.B, turnNumber) +
getBadsLeft('W', this.goodBads.W, turnNumber);
return report ? `Bad moves left\n\n${report}` : '';
}
}
function makeGoodBads(pl, drops, stat) {
return [
// 0: Good moves.
drops.filter((n) => n.pl === pl && n.winrateDrop < stat.goodmovewinrate),
// 1: Not bad moves.
drops.filter((n) => n.pl === pl && n.winrateDrop < stat.badmovewinrate),
// 2: Bad moves.
drops.filter((n) => n.pl === pl && n.winrateDrop >= stat.badmovewinrate),
// 3: Bad hot spots.
drops.filter(
(n) => n.pl === pl && n.winrateDrop >= stat.badhotspotwinrate,
),
// 4: Top 10 win rate drops.
drops
.filter((n) => n.pl === pl && n.winrateDrop)
.sort((a, b) => b.winrateDrop - a.winrateDrop)
.slice(0, 10),
// 5: Top 10 score drops.
drops
.filter((n) => n.pl === pl && n.scoreDrop)
.sort((a, b) => b.scoreDrop - a.scoreDrop)
.slice(0, 10),
// 6: Total.
drops.filter((n) => n.pl === pl),
// 7: KataGo Top Choices.
drops.filter((n) => n.pl === pl && n.choice === 0),
];
}
const percentage = (f) => (f * 100).toFixed(2);
// e.g.,
// * More than 5% win rate drops (5.56%, 5/90): #79 ⇣9.20%, #83 ⇣8.49%, ...
function getDropList(text, moves, total, listMoves, withDrop, isScore) {
if (!moves.length) return '';
return [
`* ${text}`,
total
? ` (${percentage(moves.length / total)}%, ${moves.length}/${total})`
: '',
listMoves ? ': ' : '',
listMoves && withDrop && isScore
? moves
.map((m) => `#${m.index + 1} ⇣${m.scoreDrop.toFixed(2)}`)
.join(', ')
: '',
listMoves && withDrop && !isScore
? moves
.map((m) => `#${m.index + 1} ⇣${percentage(m.winrateDrop)}%`)
.join(', ')
: '',
listMoves && !withDrop
? moves.map((m) => `#${m.index + 1}`).join(', ')
: '',
'\n',
].join('');
}
// e.g.,
// * KataGo top choices (54.81%, 57/104)
// * Less than 2% win rate drops (83.33%, 75/90)
// * Less than 5% win rate drops (94.44%, 85/90)
// * More than 5% win rate drops (5.56%, 5/90): #79 ⇣9.20%, #83 ⇣8.49%, ...
// * More than 20% win rate drops (2.22%, 2/90): #89 ⇣25.12%, #93 ⇣26.86%
// * Top 10 win rate drops: #93 ⇣26.86%, #89 ⇣25.12%, ...
// * Top 10 score drops: #89 ⇣6.34, #93 ⇣4.61, #167 ⇣4.40, ...
function reportGoodAndBads(
moves,
goodmovewinrate,
badmovewinrate,
badhotspotwinrate,
) {
const total = moves[6].length;
const goodmove = `Less than ${goodmovewinrate * 100}% win rate drops`;
const notbadmove = `Less than ${badmovewinrate * 100}% win rate drops`;
const badmove = `More than ${badmovewinrate * 100}% win rate drops`;
const badhotspot = `More than ${badhotspotwinrate * 100}% win rate drops`;
return (
getDropList('KataGo top choices', moves[7], total, false) +
getDropList(goodmove, moves[0], total, false) +
getDropList(notbadmove, moves[1], total, false) +
getDropList(badmove, moves[2], total, true, true) +
getDropList(badhotspot, moves[3], total, true, true) +
getDropList('Top 10 win rate drops', moves[4], null, true, true) +
getDropList('Top 10 score drops', moves[5], null, true, true, true)
);
}
function plColor(pl, color) {
return pl ? `${pl} (${color})` : color;
}
// e.g.,
// * Black bad moves: #117 ⇣14.99%, #127 ⇣11.81%, ...
// * Black bad hot spots: #129 ⇣30.29%
function getBadsLeft(pl, goodBads, turnNumber) {
const badmovesText = pl === 'B' ? 'Black bad moves' : 'White bad moves';
  const hotspotText = pl === 'B' ? 'Black bad hot spots' : 'White bad hot spots';
const movesLeft = (i, turn) => goodBads[i].filter((m) => m.index > turn);
return (
getDropList(badmovesText, movesLeft(2, turnNumber), null, true, true) +
getDropList(hotspotText, movesLeft(3, turnNumber), null, true, true)
);
}
module.exports = GameReport;
| GameReport |
index.spec.js | import React from 'react';
import { shallow, mount } from 'enzyme';
import thunk from 'redux-thunk';
import configureMockStore from 'redux-mock-store';
import { BrowserRouter as Router } from 'react-router-dom';
import Signup, { signupComponent } from './index';
const middlewares = [thunk];
const mockStore = configureMockStore(middlewares);
let store;
describe('Sign up', () => {
beforeEach(() => {
store = mockStore({
auth: {
authError: null,
user: []
}
});
});
it('should render correctly in "debug" mode', () => {
const component = shallow(<signupComponent />);
expect(component).toMatchSnapshot();
});
| const component = mount(
<Router>
<Signup store={store} />
</Router>
);
expect(component).toMatchSnapshot();
});
}); | it('should render signup with store', () => { |
string1.py | #!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
# Basic string exercises
# Fill in the code for the functions below. main() is already set up
# to call the functions with a few different inputs,
# printing 'OK' when each function is correct.
# The starter code for each function includes a 'return'
# which is just a placeholder for your code.
# It's ok if you do not complete all the functions, and there
# are some additional functions to try in string2.py.
| # passed in. However, if the count is 10 or more, then use the word 'many'
# instead of the actual count.
# So donuts(5) returns 'Number of donuts: 5'
# and donuts(23) returns 'Number of donuts: many'
def donuts(count):
return 'Number of donuts: {}'.format(count if count < 10 else 'many')
# B. both_ends
# Given a string s, return a string made of the first 2
# and the last 2 chars of the original string,
# so 'spring' yields 'spng'. However, if the string length
# is less than 2, return instead the empty string.
def both_ends(s):
if len(s) < 2:
return ''
return s[0:2] + s[-2:]
# C. fix_start
# Given a string s, return a string
# where all occurences of its first char have
# been changed to '*', except do not change
# the first char itself.
# e.g. 'babble' yields 'ba**le'
# Assume that the string is length 1 or more.
# Hint: s.replace(stra, strb) returns a version of string s
# where all instances of stra have been replaced by strb.
def fix_start(s):
return s[0] + s[1:].replace(s[0], '*')
# D. MixUp
# Given strings a and b, return a single string with a and b separated
# by a space '<a> <b>', except swap the first 2 chars of each string.
# e.g.
# 'mix', pod' -> 'pox mid'
# 'dog', 'dinner' -> 'dig donner'
# Assume a and b are length 2 or more.
def mix_up(a, b):
return '{} {}'.format(b[:2] + a[2:], a[:2] + b[2:])
# Provided simple test() function used in main() to print
# what each function returns vs. what it's supposed to return.
def test(got, expected):
if got == expected:
prefix = ' OK '
else:
prefix = ' X '
print('%s got: %s expected: %s' % (prefix, repr(got), repr(expected)))
# Provided main() calls the above functions with interesting inputs,
# using test() to check if each result is correct or not.
def main():
print('donuts')
# Each line calls donuts, compares its result to the expected for that call.
test(donuts(4), 'Number of donuts: 4')
test(donuts(9), 'Number of donuts: 9')
test(donuts(10), 'Number of donuts: many')
test(donuts(99), 'Number of donuts: many')
print()
print('both_ends')
test(both_ends('spring'), 'spng')
test(both_ends('Hello'), 'Helo')
test(both_ends('a'), '')
test(both_ends('xyz'), 'xyyz')
print()
print('fix_start')
test(fix_start('babble'), 'ba**le')
test(fix_start('aardvark'), 'a*rdv*rk')
test(fix_start('google'), 'goo*le')
test(fix_start('donut'), 'donut')
print()
print('mix_up')
test(mix_up('mix', 'pod'), 'pox mid')
test(mix_up('dog', 'dinner'), 'dig donner')
test(mix_up('gnash', 'sport'), 'spash gnort')
test(mix_up('pezzy', 'firm'), 'fizzy perm')
# Standard boilerplate to call the main() function.
if __name__ == '__main__':
main() |
# A. donuts
# Given an int count of a number of donuts, return a string
# of the form 'Number of donuts: <count>', where <count> is the number |
process.py | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Forward processing of raw data to sRGB images.
Unprocessing Images for Learned Raw Denoising
http://timothybrooks.com/tech/unprocessing
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v1 as tf
def apply_gains(bayer_images, red_gains, blue_gains):
"""Applies white balance gains to a batch of Bayer images."""
bayer_images.shape.assert_is_compatible_with((None, None, None, 4))
green_gains = tf.ones_like(red_gains)
gains = tf.stack([red_gains, green_gains, green_gains, blue_gains], axis=-1)
gains = gains[:, tf.newaxis, tf.newaxis, :]
return bayer_images * gains
def demosaic(bayer_images):
"""Bilinearly demosaics a batch of RGGB Bayer images."""
bayer_images.shape.assert_is_compatible_with((None, None, None, 4))
# This implementation exploits how edges are aligned when upsampling with
# tf.image.resize_bilinear().
with tf.name_scope(None, 'demosaic'):
shape = tf.shape(bayer_images)
shape = [shape[1] * 2, shape[2] * 2]
red = bayer_images[Ellipsis, 0:1]
red = tf.image.resize_bilinear(red, shape)
green_red = bayer_images[Ellipsis, 1:2]
green_red = tf.image.flip_left_right(green_red)
green_red = tf.image.resize_bilinear(green_red, shape)
green_red = tf.image.flip_left_right(green_red)
green_red = tf.space_to_depth(green_red, 2)
green_blue = bayer_images[Ellipsis, 2:3]
green_blue = tf.image.flip_up_down(green_blue)
green_blue = tf.image.resize_bilinear(green_blue, shape)
green_blue = tf.image.flip_up_down(green_blue)
green_blue = tf.space_to_depth(green_blue, 2)
green_at_red = (green_red[Ellipsis, 0] + green_blue[Ellipsis, 0]) / 2
green_at_green_red = green_red[Ellipsis, 1]
green_at_green_blue = green_blue[Ellipsis, 2]
green_at_blue = (green_red[Ellipsis, 3] + green_blue[Ellipsis, 3]) / 2
green_planes = [
green_at_red, green_at_green_red, green_at_green_blue, green_at_blue
]
green = tf.depth_to_space(tf.stack(green_planes, axis=-1), 2)
blue = bayer_images[Ellipsis, 3:4]
blue = tf.image.flip_up_down(tf.image.flip_left_right(blue))
blue = tf.image.resize_bilinear(blue, shape)
blue = tf.image.flip_up_down(tf.image.flip_left_right(blue))
rgb_images = tf.concat([red, green, blue], axis=-1)
return rgb_images
def | (images, ccms):
"""Applies color correction matrices."""
images.shape.assert_has_rank(4)
images = images[:, :, :, tf.newaxis, :]
ccms = ccms[:, tf.newaxis, tf.newaxis, :, :]
return tf.reduce_sum(images * ccms, axis=-1)
def gamma_compression(images, gamma=2.2):
"""Converts from linear to gamma space."""
# Clamps to prevent numerical instability of gradients near zero.
return tf.maximum(images, 1e-8) ** (1.0 / gamma)
def process(bayer_images, red_gains, blue_gains, cam2rgbs):
"""Processes a batch of Bayer RGGB images into sRGB images."""
bayer_images.shape.assert_is_compatible_with((None, None, None, 4))
with tf.name_scope(None, 'process'):
# White balance.
bayer_images = apply_gains(bayer_images, red_gains, blue_gains)
# Demosaic.
bayer_images = tf.clip_by_value(bayer_images, 0.0, 1.0)
images = demosaic(bayer_images)
# Color correction.
images = apply_ccms(images, cam2rgbs)
# Gamma compression.
images = tf.clip_by_value(images, 0.0, 1.0)
images = gamma_compression(images)
return images
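# Usage sketch (illustrative; not part of the original file): assuming packed
# half-resolution RGGB planes as input, the pipeline above can be wired up with
# TF1 placeholders. The shapes below are made-up examples.
#
#   bayer = tf.placeholder(tf.float32, [None, 128, 128, 4])
#   red_gains = tf.placeholder(tf.float32, [None])
#   blue_gains = tf.placeholder(tf.float32, [None])
#   cam2rgbs = tf.placeholder(tf.float32, [None, 3, 3])
#   srgb = process(bayer, red_gains, blue_gains, cam2rgbs)  # -> (None, 256, 256, 3)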
| apply_ccms |
dataset_builder.py | import logging
import os
from torch.utils.data import DataLoader
from locator import Locator
class DatasetBuilder:
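    """Lazily builds the tokeniser, the train/val datasets and their DataLoaders
    from the configured dataset/tokenisor factory names (resolved via Locator).
    """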
def __init__(self, val_data, dataset_factory_name, tokenisor_factory_name, train_data=None, num_workers=None,
batch_size=8, addition_args_dict=None):
self._addition_args_dict = addition_args_dict
self.train_data = train_data
self.val_data = val_data
self.batch_size = batch_size
self._dataset_factory = Locator().get(dataset_factory_name)
self._tokenisor_factory = Locator().get(tokenisor_factory_name)
self.num_workers = num_workers or os.cpu_count() - 1 | if self.num_workers <= 0:
self.num_workers = 0
self._tokenisor = None
self._train_dataloader = None
self._train_dataset = None
self._val_dataset = None
self._val_dataloader = None
self._scorers = None
self._label_mapper = None
@property
def _logger(self):
return logging.getLogger(__name__)
def get_tokenisor(self):
self._logger.info("Retrieving Tokeniser")
if self._tokenisor is None:
self._tokenisor = self._tokenisor_factory.get_tokenisor(**self._addition_args_dict)
return self._tokenisor
def get_train_dataset(self):
if self._train_dataset is None:
self._train_dataset = self._dataset_factory.get_dataset(self.train_data,
preprocessors=self.get_tokenisor(),
**self._addition_args_dict)
return self._train_dataset
def get_val_dataset(self):
if self._val_dataset is None:
self._val_dataset = self._dataset_factory.get_dataset(self.val_data, preprocessors=self.get_tokenisor(),
**self._addition_args_dict)
return self._val_dataset
def get_label_mapper(self):
if self._label_mapper is None:
self._label_mapper = self._dataset_factory.get_label_mapper()
return self._label_mapper
def num_classes(self):
return self.get_label_mapper().num_classes
def positive_label_index(self):
return self._label_mapper.positive_label_index
def get_scorers(self):
if self._scorers is None:
self._scorers = self._dataset_factory.get_scorers()
return self._scorers
def get_train_dataloader(self):
if self._train_dataloader is None:
self._train_dataloader = DataLoader(dataset=self.get_train_dataset(), num_workers=self.num_workers,
batch_size=self.batch_size, shuffle=True)
return self._train_dataloader
def get_val_dataloader(self):
if self._val_dataloader is None:
self._val_dataloader = DataLoader(dataset=self.get_val_dataset(), num_workers=self.num_workers,
batch_size=self.batch_size, shuffle=False)
return self._val_dataloader | |
acme_test.py | import unittest
from acme import Product, BoxingGlove
from acme_report import generate_products, ADJECTIVES, NOUNS
"""Tests for Acme Python modules."""
class AcmeProductTests(unittest.TestCase):
"""Making sure Acme products are the tops!"""
def test_default_product_price(self):
"""Test default product price being 10."""
prod = Product('Test Product')
self.assertEqual(prod.price, 10)
def test_default_product_weight(self):
"""Test default product weight being 20."""
prod = Product('Another day, another anvil')
self.assertEqual(prod.weight, 20)
def test_default_boxing_weight(self):
"""Test weight of default boxing gloves"""
glove = BoxingGlove('Punch Puncher')
self.assertEqual(glove.weight, 10)
def test_stealable_and_explode(self):
"""Is a product stealable or explosive...or both?"""
babomb = Product('Danger!', price=20, weight=20, flammability=2.5)
self.assertEqual(babomb.stealability(), 'Very stealable!')
self.assertEqual(babomb.explode(), '...BABOOM!!')
class AcmeReportTests(unittest.TestCase):
"""Running unittest on products."""
def test_default_num_products(self):
"""Check that Acme makes 30 products by default."""
self.assertEqual(len(generate_products()), 30)
def test_legal_names(self):
|
if __name__ == '__main__':
unittest.main()
| """Check that all products have valid names."""
for product in generate_products():
adjective, noun = product.name.split()
self.assertIn(adjective, ADJECTIVES)
self.assertIn(noun, NOUNS) |
sty.rs | // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This module contains `TyKind` and its major components.
use hir;
use hir::def_id::DefId;
use infer::canonical::Canonical;
use mir::interpret::ConstValue;
use middle::region;
use polonius_engine::Atom;
use rustc_data_structures::indexed_vec::Idx;
use ty::subst::{Substs, Subst, Kind, UnpackedKind};
use ty::{self, AdtDef, TypeFlags, Ty, TyCtxt, TypeFoldable};
use ty::{List, TyS, ParamEnvAnd, ParamEnv};
use util::captures::Captures;
use mir::interpret::{Scalar, Pointer};
use smallvec::SmallVec;
use std::iter;
use std::cmp::Ordering;
use rustc_target::spec::abi;
use syntax::ast::{self, Ident};
use syntax::symbol::{keywords, InternedString};
use serialize;
use self::InferTy::*;
use self::TyKind::*;
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct TypeAndMut<'tcx> {
pub ty: Ty<'tcx>,
pub mutbl: hir::Mutability,
}
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash,
RustcEncodable, RustcDecodable, Copy)]
/// A "free" region `fr` can be interpreted as "some region
/// at least as big as the scope `fr.scope`".
pub struct FreeRegion {
pub scope: DefId,
pub bound_region: BoundRegion,
}
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash,
RustcEncodable, RustcDecodable, Copy)]
pub enum BoundRegion {
/// An anonymous region parameter for a given fn (&T)
BrAnon(u32),
/// Named region parameters for functions (a in &'a T)
///
/// The def-id is needed to distinguish free regions in
/// the event of shadowing.
BrNamed(DefId, InternedString),
/// Fresh bound identifiers created during GLB computations.
BrFresh(u32),
/// Anonymous region for the implicit env pointer parameter
/// to a closure
BrEnv,
}
impl BoundRegion {
pub fn is_named(&self) -> bool {
match *self {
BoundRegion::BrNamed(..) => true,
_ => false,
}
}
/// When canonicalizing, we replace unbound inference variables and free
/// regions with anonymous late bound regions. This method asserts that
/// we have an anonymous late bound region, which hence may refer to
/// a canonical variable.
pub fn assert_bound_var(&self) -> BoundVar {
match *self {
BoundRegion::BrAnon(var) => BoundVar::from_u32(var),
_ => bug!("bound region is not anonymous"),
}
}
}
/// N.B., if you change this, you'll probably want to change the corresponding
/// AST structure in `libsyntax/ast.rs` as well.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
pub enum TyKind<'tcx> {
/// The primitive boolean type. Written as `bool`.
Bool,
/// The primitive character type; holds a Unicode scalar value
/// (a non-surrogate code point). Written as `char`.
Char,
/// A primitive signed integer type. For example, `i32`.
Int(ast::IntTy),
/// A primitive unsigned integer type. For example, `u32`.
Uint(ast::UintTy),
/// A primitive floating-point type. For example, `f64`.
Float(ast::FloatTy),
/// Structures, enumerations and unions.
///
/// Substs here, possibly against intuition, *may* contain `Param`s.
/// That is, even after substitution it is possible that there are type
/// variables. This happens when the `Adt` corresponds to an ADT
/// definition and not a concrete use of it.
Adt(&'tcx AdtDef, &'tcx Substs<'tcx>),
Foreign(DefId),
/// The pointee of a string slice. Written as `str`.
Str,
/// An array with the given length. Written as `[T; n]`.
Array(Ty<'tcx>, &'tcx ty::Const<'tcx>),
/// The pointee of an array slice. Written as `[T]`.
Slice(Ty<'tcx>),
/// A raw pointer. Written as `*mut T` or `*const T`
RawPtr(TypeAndMut<'tcx>),
/// A reference; a pointer with an associated lifetime. Written as
/// `&'a mut T` or `&'a T`.
Ref(Region<'tcx>, Ty<'tcx>, hir::Mutability),
/// The anonymous type of a function declaration/definition. Each
/// function has a unique type, which is output (for a function
/// named `foo` returning an `i32`) as `fn() -> i32 {foo}`.
///
/// For example the type of `bar` here:
///
/// ```rust
/// fn foo() -> i32 { 1 }
/// let bar = foo; // bar: fn() -> i32 {foo}
/// ```
FnDef(DefId, &'tcx Substs<'tcx>),
/// A pointer to a function. Written as `fn() -> i32`.
///
/// For example the type of `bar` here:
///
/// ```rust
/// fn foo() -> i32 { 1 }
/// let bar: fn() -> i32 = foo;
/// ```
FnPtr(PolyFnSig<'tcx>),
/// A trait, defined with `trait`.
Dynamic(Binder<&'tcx List<ExistentialPredicate<'tcx>>>, ty::Region<'tcx>),
/// The anonymous type of a closure. Used to represent the type of
/// `|a| a`.
Closure(DefId, ClosureSubsts<'tcx>),
/// The anonymous type of a generator. Used to represent the type of
/// `|a| yield a`.
Generator(DefId, GeneratorSubsts<'tcx>, hir::GeneratorMovability),
    /// A type representing the types stored inside a generator.
/// This should only appear in GeneratorInteriors.
GeneratorWitness(Binder<&'tcx List<Ty<'tcx>>>),
/// The never type `!`
Never,
/// A tuple type. For example, `(i32, bool)`.
Tuple(&'tcx List<Ty<'tcx>>),
/// The projection of an associated type. For example,
/// `<T as Trait<..>>::N`.
Projection(ProjectionTy<'tcx>),
/// A placeholder type used when we do not have enough information
/// to normalize the projection of an associated type to an
/// existing concrete type. Currently only used with chalk-engine.
UnnormalizedProjection(ProjectionTy<'tcx>),
/// Opaque (`impl Trait`) type found in a return type.
/// The `DefId` comes either from
/// * the `impl Trait` ast::Ty node,
/// * or the `existential type` declaration
/// The substitutions are for the generics of the function in question.
/// After typeck, the concrete type can be found in the `types` map.
Opaque(DefId, &'tcx Substs<'tcx>),
    /// A type parameter; for example, `T` in `fn f<T>(x: T) {}`.
Param(ParamTy),
/// Bound type variable, used only when preparing a trait query.
Bound(ty::DebruijnIndex, BoundTy),
/// A placeholder type - universally quantified higher-ranked type.
Placeholder(ty::PlaceholderType),
/// A type variable used during type checking.
Infer(InferTy),
/// A placeholder for a type which could not be computed; this is
/// propagated to avoid useless error messages.
Error,
}
// `TyKind` is used a lot. Make sure it doesn't unintentionally get bigger.
#[cfg(target_arch = "x86_64")]
static_assert!(MEM_SIZE_OF_TY_KIND: ::std::mem::size_of::<TyKind<'_>>() == 24);
/// A closure can be modeled as a struct that looks like:
///
/// struct Closure<'l0...'li, T0...Tj, CK, CS, U0...Uk> {
/// upvar0: U0,
/// ...
/// upvark: Uk
/// }
///
/// where:
///
/// - 'l0...'li and T0...Tj are the lifetime and type parameters
/// in scope on the function that defined the closure,
/// - CK represents the *closure kind* (Fn vs FnMut vs FnOnce). This
/// is rather hackily encoded via a scalar type. See
/// `TyS::to_opt_closure_kind` for details.
/// - CS represents the *closure signature*, representing as a `fn()`
/// type. For example, `fn(u32, u32) -> u32` would mean that the closure
/// implements `CK<(u32, u32), Output = u32>`, where `CK` is the trait
/// specified above.
/// - U0...Uk are type parameters representing the types of its upvars
/// (borrowed, if appropriate; that is, if Ui represents a by-ref upvar,
/// and the up-var has the type `Foo`, then `Ui = &Foo`).
///
/// So, for example, given this function:
///
/// fn foo<'a, T>(data: &'a mut T) {
/// do(|| data.count += 1)
/// }
///
/// the type of the closure would be something like:
///
/// struct Closure<'a, T, U0> {
/// data: U0
/// }
///
/// Note that the type of the upvar is not specified in the struct.
/// You may wonder how the impl would then be able to use the upvar,
/// if it doesn't know its type? The answer is that the impl is
/// (conceptually) not fully generic over Closure but rather tied to
/// instances with the expected upvar types:
///
/// impl<'b, 'a, T> FnMut() for Closure<'a, T, &'b mut &'a mut T> {
/// ...
/// }
///
/// You can see that the *impl* fully specified the type of the upvar
/// and thus knows full well that `data` has type `&'b mut &'a mut T`.
/// (Here, I am assuming that `data` is mut-borrowed.)
///
/// Now, the last question you may ask is: Why include the upvar types
/// as extra type parameters? The reason for this design is that the
/// upvar types can reference lifetimes that are internal to the
/// creating function. In my example above, for example, the lifetime
/// `'b` represents the scope of the closure itself; this is some
/// subset of `foo`, probably just the scope of the call to the to
/// `do()`. If we just had the lifetime/type parameters from the
/// enclosing function, we couldn't name this lifetime `'b`. Note that
/// there can also be lifetimes in the types of the upvars themselves,
/// if one of them happens to be a reference to something that the
/// creating fn owns.
///
/// OK, you say, so why not create a more minimal set of parameters
/// that just includes the extra lifetime parameters? The answer is
/// primarily that it would be hard --- we don't know at the time when
/// we create the closure type what the full types of the upvars are,
/// nor do we know which are borrowed and which are not. In this
/// design, we can just supply a fresh type parameter and figure that
/// out later.
///
/// All right, you say, but why include the type parameters from the
/// original function then? The answer is that codegen may need them
/// when monomorphizing, and they may not appear in the upvars. A
/// closure could capture no variables but still make use of some
/// in-scope type parameter with a bound (e.g., if our example above
/// had an extra `U: Default`, and the closure called `U::default()`).
///
/// There is another reason. This design (implicitly) prohibits
/// closures from capturing themselves (except via a trait
/// object). This simplifies closure inference considerably, since it
/// means that when we infer the kind of a closure or its upvars, we
/// don't have to handle cycles where the decisions we make for
/// closure C wind up influencing the decisions we ought to make for
/// closure C (which would then require fixed point iteration to
/// handle). Plus it fixes an ICE. :P
///
/// ## Generators
///
/// Perhaps surprisingly, `ClosureSubsts` are also used for
/// generators. In that case, what is written above is only half-true
/// -- the set of type parameters is similar, but the roles of CK and
/// CS are different. CK represents the "yield type" and CS
/// represents the "return type" of the generator.
///
/// It'd be nice to split this struct into ClosureSubsts and
/// GeneratorSubsts, I believe. -nmatsakis
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct ClosureSubsts<'tcx> {
/// Lifetime and type parameters from the enclosing function,
/// concatenated with the types of the upvars.
///
/// These are separated out because codegen wants to pass them around
/// when monomorphizing.
pub substs: &'tcx Substs<'tcx>,
}
/// Struct returned by `split()`. Note that these are subslices of the
/// parent slice and not canonical substs themselves.
struct SplitClosureSubsts<'tcx> {
closure_kind_ty: Ty<'tcx>,
closure_sig_ty: Ty<'tcx>,
upvar_kinds: &'tcx [Kind<'tcx>],
}
impl<'tcx> ClosureSubsts<'tcx> {
/// Divides the closure substs into their respective
/// components. Single source of truth with respect to the
/// ordering.
fn split(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> SplitClosureSubsts<'tcx> {
let generics = tcx.generics_of(def_id);
let parent_len = generics.parent_count;
SplitClosureSubsts {
closure_kind_ty: self.substs.type_at(parent_len),
closure_sig_ty: self.substs.type_at(parent_len + 1),
upvar_kinds: &self.substs[parent_len + 2..],
}
}
#[inline]
pub fn upvar_tys(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) ->
impl Iterator<Item=Ty<'tcx>> + 'tcx
{
let SplitClosureSubsts { upvar_kinds, .. } = self.split(def_id, tcx);
upvar_kinds.iter().map(|t| {
if let UnpackedKind::Type(ty) = t.unpack() {
ty
} else {
bug!("upvar should be type")
}
})
}
/// Returns the closure kind for this closure; may return a type
/// variable during inference. To get the closure kind during
/// inference, use `infcx.closure_kind(def_id, substs)`.
pub fn closure_kind_ty(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> Ty<'tcx> {
self.split(def_id, tcx).closure_kind_ty
}
/// Returns the type representing the closure signature for this
/// closure; may contain type variables during inference. To get
/// the closure signature during inference, use
/// `infcx.fn_sig(def_id)`.
pub fn closure_sig_ty(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> Ty<'tcx> {
self.split(def_id, tcx).closure_sig_ty
}
/// Returns the closure kind for this closure; only usable outside
/// of an inference context, because in that context we know that
/// there are no type variables.
///
/// If you have an inference context, use `infcx.closure_kind()`.
pub fn closure_kind(self, def_id: DefId, tcx: TyCtxt<'_, 'tcx, 'tcx>) -> ty::ClosureKind {
self.split(def_id, tcx).closure_kind_ty.to_opt_closure_kind().unwrap()
}
/// Extracts the signature from the closure; only usable outside
/// of an inference context, because in that context we know that
/// there are no type variables.
///
/// If you have an inference context, use `infcx.closure_sig()`.
pub fn closure_sig(self, def_id: DefId, tcx: TyCtxt<'_, 'tcx, 'tcx>) -> ty::PolyFnSig<'tcx> {
match self.closure_sig_ty(def_id, tcx).sty {
ty::FnPtr(sig) => sig,
ref t => bug!("closure_sig_ty is not a fn-ptr: {:?}", t),
}
}
}
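// Sketch (not part of the original source): given `substs: ClosureSubsts<'tcx>` for a
// closure `def_id`, the pieces laid out in the comment above are reached through
// `substs.closure_kind_ty(def_id, tcx)`, `substs.closure_sig_ty(def_id, tcx)` and
// `substs.upvar_tys(def_id, tcx)`, mirroring the CK / CS / U0..Uk ordering.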
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct GeneratorSubsts<'tcx> {
pub substs: &'tcx Substs<'tcx>,
}
struct SplitGeneratorSubsts<'tcx> {
yield_ty: Ty<'tcx>,
return_ty: Ty<'tcx>,
witness: Ty<'tcx>,
upvar_kinds: &'tcx [Kind<'tcx>],
}
impl<'tcx> GeneratorSubsts<'tcx> {
fn split(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> SplitGeneratorSubsts<'tcx> {
let generics = tcx.generics_of(def_id);
let parent_len = generics.parent_count;
SplitGeneratorSubsts {
yield_ty: self.substs.type_at(parent_len),
return_ty: self.substs.type_at(parent_len + 1),
witness: self.substs.type_at(parent_len + 2),
upvar_kinds: &self.substs[parent_len + 3..],
}
}
/// This describes the types that can be contained in a generator.
/// It will be a type variable initially and unified in the last stages of typeck of a body.
/// It contains a tuple of all the types that could end up on a generator frame.
/// The state transformation MIR pass may only produce layouts which mention types
/// in this tuple. Upvars are not counted here.
pub fn witness(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> Ty<'tcx> {
self.split(def_id, tcx).witness
}
#[inline]
pub fn upvar_tys(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) ->
impl Iterator<Item=Ty<'tcx>> + 'tcx
{
let SplitGeneratorSubsts { upvar_kinds, .. } = self.split(def_id, tcx);
upvar_kinds.iter().map(|t| {
if let UnpackedKind::Type(ty) = t.unpack() {
ty
} else {
bug!("upvar should be type")
}
})
}
/// Returns the type representing the yield type of the generator.
pub fn yield_ty(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> Ty<'tcx> {
self.split(def_id, tcx).yield_ty
}
/// Returns the type representing the return type of the generator.
pub fn return_ty(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> Ty<'tcx> {
self.split(def_id, tcx).return_ty
}
/// Return the "generator signature", which consists of its yield
/// and return types.
///
/// NB. Some bits of the code prefers to see this wrapped in a
/// binder, but it never contains bound regions. Probably this
/// function should be removed.
pub fn poly_sig(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> PolyGenSig<'tcx> {
ty::Binder::dummy(self.sig(def_id, tcx))
}
/// Return the "generator signature", which consists of its yield
/// and return types.
pub fn sig(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> GenSig<'tcx> {
ty::GenSig {
yield_ty: self.yield_ty(def_id, tcx),
return_ty: self.return_ty(def_id, tcx),
}
}
}
impl<'a, 'gcx, 'tcx> GeneratorSubsts<'tcx> {
/// This returns the types of the MIR locals which had to be stored across suspension points.
/// It is calculated in rustc_mir::transform::generator::StateTransform.
/// All the types here must be in the tuple in GeneratorInterior.
pub fn state_tys(
self,
def_id: DefId,
tcx: TyCtxt<'a, 'gcx, 'tcx>,
) -> impl Iterator<Item=Ty<'tcx>> + Captures<'gcx> + 'a {
let state = tcx.generator_layout(def_id).fields.iter();
state.map(move |d| d.ty.subst(tcx, self.substs))
}
    /// These are the types of the fields of a generator which
    /// are available before the generator transformation.
/// It includes the upvars and the state discriminant which is u32.
pub fn pre_transforms_tys(self, def_id: DefId, tcx: TyCtxt<'a, 'gcx, 'tcx>) ->
impl Iterator<Item=Ty<'tcx>> + 'a
{
self.upvar_tys(def_id, tcx).chain(iter::once(tcx.types.u32))
}
    /// These are the types of all the fields stored in a generator.
/// It includes the upvars, state types and the state discriminant which is u32.
pub fn field_tys(self, def_id: DefId, tcx: TyCtxt<'a, 'gcx, 'tcx>) ->
impl Iterator<Item=Ty<'tcx>> + Captures<'gcx> + 'a
{
self.pre_transforms_tys(def_id, tcx).chain(self.state_tys(def_id, tcx))
}
}
#[derive(Debug, Copy, Clone)]
pub enum UpvarSubsts<'tcx> {
Closure(ClosureSubsts<'tcx>),
Generator(GeneratorSubsts<'tcx>),
}
impl<'tcx> UpvarSubsts<'tcx> {
#[inline]
pub fn upvar_tys(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) ->
impl Iterator<Item=Ty<'tcx>> + 'tcx
{
let upvar_kinds = match self {
UpvarSubsts::Closure(substs) => substs.split(def_id, tcx).upvar_kinds,
UpvarSubsts::Generator(substs) => substs.split(def_id, tcx).upvar_kinds,
};
upvar_kinds.iter().map(|t| {
if let UnpackedKind::Type(ty) = t.unpack() {
ty
} else {
bug!("upvar should be type")
}
})
}
}
#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Ord, Eq, Hash, RustcEncodable, RustcDecodable)]
pub enum ExistentialPredicate<'tcx> {
/// e.g., Iterator
Trait(ExistentialTraitRef<'tcx>),
/// e.g., Iterator::Item = T
Projection(ExistentialProjection<'tcx>),
/// e.g., Send
AutoTrait(DefId),
}
impl<'a, 'gcx, 'tcx> ExistentialPredicate<'tcx> {
/// Compares via an ordering that will not change if modules are reordered or other changes are
/// made to the tree. In particular, this ordering is preserved across incremental compilations.
pub fn stable_cmp(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, other: &Self) -> Ordering {
use self::ExistentialPredicate::*;
match (*self, *other) {
(Trait(_), Trait(_)) => Ordering::Equal,
(Projection(ref a), Projection(ref b)) =>
tcx.def_path_hash(a.item_def_id).cmp(&tcx.def_path_hash(b.item_def_id)),
(AutoTrait(ref a), AutoTrait(ref b)) =>
tcx.trait_def(*a).def_path_hash.cmp(&tcx.trait_def(*b).def_path_hash),
(Trait(_), _) => Ordering::Less,
(Projection(_), Trait(_)) => Ordering::Greater,
(Projection(_), _) => Ordering::Less,
(AutoTrait(_), _) => Ordering::Greater,
}
}
}
impl<'a, 'gcx, 'tcx> Binder<ExistentialPredicate<'tcx>> {
pub fn with_self_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, self_ty: Ty<'tcx>)
-> ty::Predicate<'tcx> {
use ty::ToPredicate;
match *self.skip_binder() {
ExistentialPredicate::Trait(tr) => Binder(tr).with_self_ty(tcx, self_ty).to_predicate(),
ExistentialPredicate::Projection(p) =>
ty::Predicate::Projection(Binder(p.with_self_ty(tcx, self_ty))),
ExistentialPredicate::AutoTrait(did) => {
let trait_ref = Binder(ty::TraitRef {
def_id: did,
substs: tcx.mk_substs_trait(self_ty, &[]),
});
trait_ref.to_predicate()
}
}
}
}
impl<'tcx> serialize::UseSpecializedDecodable for &'tcx List<ExistentialPredicate<'tcx>> {}
impl<'tcx> List<ExistentialPredicate<'tcx>> {
pub fn principal(&self) -> ExistentialTraitRef<'tcx> {
match self[0] {
ExistentialPredicate::Trait(tr) => tr,
other => bug!("first predicate is {:?}", other),
}
}
#[inline]
pub fn projection_bounds<'a>(&'a self) ->
impl Iterator<Item=ExistentialProjection<'tcx>> + 'a {
self.iter().filter_map(|predicate| {
match *predicate {
ExistentialPredicate::Projection(p) => Some(p),
_ => None,
}
})
}
#[inline]
pub fn auto_traits<'a>(&'a self) -> impl Iterator<Item=DefId> + 'a {
self.iter().filter_map(|predicate| {
match *predicate {
ExistentialPredicate::AutoTrait(d) => Some(d),
_ => None
}
})
}
}
impl<'tcx> Binder<&'tcx List<ExistentialPredicate<'tcx>>> {
pub fn principal(&self) -> PolyExistentialTraitRef<'tcx> {
Binder::bind(self.skip_binder().principal())
}
#[inline]
pub fn projection_bounds<'a>(&'a self) ->
impl Iterator<Item=PolyExistentialProjection<'tcx>> + 'a {
self.skip_binder().projection_bounds().map(Binder::bind)
}
#[inline]
pub fn auto_traits<'a>(&'a self) -> impl Iterator<Item=DefId> + 'a {
self.skip_binder().auto_traits()
}
pub fn iter<'a>(&'a self)
-> impl DoubleEndedIterator<Item=Binder<ExistentialPredicate<'tcx>>> + 'tcx {
self.skip_binder().iter().cloned().map(Binder::bind)
}
}
/// A complete reference to a trait. These take numerous guises in syntax,
/// but perhaps the most recognizable form is in a where clause:
///
/// T: Foo<U>
///
/// This would be represented by a trait-reference where the def-id is the
/// def-id for the trait `Foo` and the substs define `T` as parameter 0,
/// and `U` as parameter 1.
///
/// Trait references also appear in object types like `Foo<U>`, but in
/// that case the `Self` parameter is absent from the substitutions.
///
/// Note that a `TraitRef` introduces a level of region binding, to
/// account for higher-ranked trait bounds like `T: for<'a> Foo<&'a U>`
/// or higher-ranked object types.
#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct TraitRef<'tcx> {
pub def_id: DefId,
pub substs: &'tcx Substs<'tcx>,
}
impl<'tcx> TraitRef<'tcx> {
pub fn new(def_id: DefId, substs: &'tcx Substs<'tcx>) -> TraitRef<'tcx> {
TraitRef { def_id: def_id, substs: substs }
}
/// Returns a `TraitRef` of the form `P0: Foo<P1..Pn>` where `Pi`
/// are the parameters defined on trait.
pub fn identity<'a, 'gcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, def_id: DefId) -> TraitRef<'tcx> {
TraitRef {
def_id,
substs: Substs::identity_for_item(tcx, def_id),
}
}
#[inline]
pub fn self_ty(&self) -> Ty<'tcx> {
self.substs.type_at(0)
}
pub fn input_types<'a>(&'a self) -> impl DoubleEndedIterator<Item = Ty<'tcx>> + 'a {
// Select only the "input types" from a trait-reference. For
// now this is all the types that appear in the
// trait-reference, but it should eventually exclude
// associated types.
self.substs.types()
}
pub fn from_method(tcx: TyCtxt<'_, '_, 'tcx>,
trait_id: DefId,
substs: &Substs<'tcx>)
-> ty::TraitRef<'tcx> {
let defs = tcx.generics_of(trait_id);
ty::TraitRef {
def_id: trait_id,
substs: tcx.intern_substs(&substs[..defs.params.len()])
}
}
}
pub type PolyTraitRef<'tcx> = Binder<TraitRef<'tcx>>;
impl<'tcx> PolyTraitRef<'tcx> {
pub fn self_ty(&self) -> Ty<'tcx> {
self.skip_binder().self_ty()
}
pub fn def_id(&self) -> DefId {
self.skip_binder().def_id
}
pub fn to_poly_trait_predicate(&self) -> ty::PolyTraitPredicate<'tcx> {
// Note that we preserve binding levels
Binder(ty::TraitPredicate { trait_ref: self.skip_binder().clone() })
}
}
/// An existential reference to a trait, where `Self` is erased.
/// For example, the trait object `Trait<'a, 'b, X, Y>` is:
///
/// exists T. T: Trait<'a, 'b, X, Y>
///
/// The substitutions don't include the erased `Self`, only trait
/// type and lifetime parameters (`[X, Y]` and `['a, 'b]` above).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
pub struct ExistentialTraitRef<'tcx> {
pub def_id: DefId,
pub substs: &'tcx Substs<'tcx>,
}
impl<'a, 'gcx, 'tcx> ExistentialTraitRef<'tcx> {
pub fn input_types<'b>(&'b self) -> impl DoubleEndedIterator<Item=Ty<'tcx>> + 'b {
// Select only the "input types" from a trait-reference. For
// now this is all the types that appear in the
// trait-reference, but it should eventually exclude
// associated types.
self.substs.types()
}
pub fn erase_self_ty(tcx: TyCtxt<'a, 'gcx, 'tcx>,
trait_ref: ty::TraitRef<'tcx>)
-> ty::ExistentialTraitRef<'tcx> {
// Assert there is a Self.
trait_ref.substs.type_at(0);
ty::ExistentialTraitRef {
def_id: trait_ref.def_id,
substs: tcx.intern_substs(&trait_ref.substs[1..])
}
}
/// Object types don't have a self-type specified. Therefore, when
/// we convert the principal trait-ref into a normal trait-ref,
/// you must give *some* self-type. A common choice is `mk_err()`
/// or some placeholder type.
pub fn with_self_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, self_ty: Ty<'tcx>)
-> ty::TraitRef<'tcx> {
// otherwise the escaping vars would be captured by the binder
// debug_assert!(!self_ty.has_escaping_bound_vars());
ty::TraitRef {
def_id: self.def_id,
substs: tcx.mk_substs_trait(self_ty, self.substs)
}
}
}
pub type PolyExistentialTraitRef<'tcx> = Binder<ExistentialTraitRef<'tcx>>;
impl<'tcx> PolyExistentialTraitRef<'tcx> {
pub fn def_id(&self) -> DefId {
self.skip_binder().def_id
}
/// Object types don't have a self-type specified. Therefore, when
/// we convert the principal trait-ref into a normal trait-ref,
/// you must give *some* self-type. A common choice is `mk_err()`
/// or some placeholder type.
pub fn with_self_ty(&self, tcx: TyCtxt<'_, '_, 'tcx>,
self_ty: Ty<'tcx>)
-> ty::PolyTraitRef<'tcx> {
self.map_bound(|trait_ref| trait_ref.with_self_ty(tcx, self_ty))
}
}
/// Binder is a binder for higher-ranked lifetimes or types. It is part of the
/// compiler's representation for things like `for<'a> Fn(&'a isize)`
/// (which would be represented by the type `PolyTraitRef ==
/// Binder<TraitRef>`). Note that when we instantiate,
/// erase, or otherwise "discharge" these bound vars, we change the
/// type from `Binder<T>` to just `T` (see
/// e.g., `liberate_late_bound_regions`).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct Binder<T>(T);
impl<T> Binder<T> {
/// Wraps `value` in a binder, asserting that `value` does not
/// contain any bound vars that would be bound by the
/// binder. This is commonly used to 'inject' a value T into a
/// different binding level.
pub fn dummy<'tcx>(value: T) -> Binder<T>
where T: TypeFoldable<'tcx>
{
debug_assert!(!value.has_escaping_bound_vars());
Binder(value)
}
/// Wraps `value` in a binder, binding higher-ranked vars (if any).
pub fn bind<'tcx>(value: T) -> Binder<T> {
Binder(value)
}
/// Skips the binder and returns the "bound" value. This is a
/// risky thing to do because it's easy to get confused about
/// debruijn indices and the like. It is usually better to
/// discharge the binder using `no_bound_vars` or
/// `replace_late_bound_regions` or something like
/// that. `skip_binder` is only valid when you are either
/// extracting data that has nothing to do with bound vars, you
/// are doing some sort of test that does not involve bound
/// regions, or you are being very careful about your depth
/// accounting.
///
/// Some examples where `skip_binder` is reasonable:
///
/// - extracting the def-id from a PolyTraitRef;
/// - comparing the self type of a PolyTraitRef to see if it is equal to
/// a type parameter `X`, since the type `X` does not reference any regions
pub fn | (&self) -> &T {
&self.0
}
pub fn as_ref(&self) -> Binder<&T> {
Binder(&self.0)
}
pub fn map_bound_ref<F, U>(&self, f: F) -> Binder<U>
where F: FnOnce(&T) -> U
{
self.as_ref().map_bound(f)
}
pub fn map_bound<F, U>(self, f: F) -> Binder<U>
where F: FnOnce(T) -> U
{
Binder(f(self.0))
}
/// Unwraps and returns the value within, but only if it contains
/// no bound vars at all. (In other words, if this binder --
/// and indeed any enclosing binder -- doesn't bind anything at
/// all.) Otherwise, returns `None`.
///
/// (One could imagine having a method that just unwraps a single
/// binder, but permits late-bound vars bound by enclosing
/// binders, but that would require adjusting the debruijn
/// indices, and given the shallow binding structure we often use,
/// would not be that useful.)
pub fn no_bound_vars<'tcx>(self) -> Option<T>
where T: TypeFoldable<'tcx>
{
if self.skip_binder().has_escaping_bound_vars() {
None
} else {
Some(self.skip_binder().clone())
}
}
/// Given two things that have the same binder level,
/// and an operation that wraps on their contents, execute the operation
/// and then wrap its result.
///
/// `f` should consider bound regions at depth 1 to be free, and
/// anything it produces with bound regions at depth 1 will be
/// bound in the resulting return value.
pub fn fuse<U,F,R>(self, u: Binder<U>, f: F) -> Binder<R>
where F: FnOnce(T, U) -> R
{
Binder(f(self.0, u.0))
}
/// Split the contents into two things that share the same binder
/// level as the original, returning two distinct binders.
///
/// `f` should consider bound regions at depth 1 to be free, and
/// anything it produces with bound regions at depth 1 will be
/// bound in the resulting return values.
pub fn split<U,V,F>(self, f: F) -> (Binder<U>, Binder<V>)
where F: FnOnce(T) -> (U, V)
{
let (u, v) = f(self.0);
(Binder(u), Binder(v))
}
}
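// Illustrative note (not from the original source): for a `sig: PolyFnSig<'tcx>`
// (i.e. `Binder<FnSig<'tcx>>`), `sig.map_bound(|s| s.output())` keeps the result
// wrapped in the binder, while `sig.skip_binder().output()` discards it and is only
// appropriate under the caveats documented on `skip_binder` above.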
/// Represents the projection of an associated type. In explicit UFCS
/// form this would be written `<T as Trait<..>>::N`.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct ProjectionTy<'tcx> {
/// The parameters of the associated item.
pub substs: &'tcx Substs<'tcx>,
/// The `DefId` of the `TraitItem` for the associated type `N`.
///
/// Note that this is not the `DefId` of the `TraitRef` containing this
/// associated type, which is in `tcx.associated_item(item_def_id).container`.
pub item_def_id: DefId,
}
impl<'a, 'tcx> ProjectionTy<'tcx> {
/// Construct a `ProjectionTy` by searching the trait from `trait_ref` for the
/// associated item named `item_name`.
pub fn from_ref_and_name(
tcx: TyCtxt<'_, '_, '_>, trait_ref: ty::TraitRef<'tcx>, item_name: Ident
) -> ProjectionTy<'tcx> {
let item_def_id = tcx.associated_items(trait_ref.def_id).find(|item| {
item.kind == ty::AssociatedKind::Type &&
tcx.hygienic_eq(item_name, item.ident, trait_ref.def_id)
}).unwrap().def_id;
ProjectionTy {
substs: trait_ref.substs,
item_def_id,
}
}
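    // Sketch (not part of the original source): for a `trait_ref` of `T: Iterator` and
    // the item name `Item`, this constructs the projection `<T as Iterator>::Item`,
    // reusing `trait_ref.substs` as shown above.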
/// Extracts the underlying trait reference from this projection.
/// For example, if this is a projection of `<T as Iterator>::Item`,
/// then this function would return a `T: Iterator` trait reference.
pub fn trait_ref(&self, tcx: TyCtxt<'_, '_, '_>) -> ty::TraitRef<'tcx> {
let def_id = tcx.associated_item(self.item_def_id).container.id();
ty::TraitRef {
def_id,
substs: self.substs,
}
}
pub fn self_ty(&self) -> Ty<'tcx> {
self.substs.type_at(0)
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct GenSig<'tcx> {
pub yield_ty: Ty<'tcx>,
pub return_ty: Ty<'tcx>,
}
pub type PolyGenSig<'tcx> = Binder<GenSig<'tcx>>;
impl<'tcx> PolyGenSig<'tcx> {
pub fn yield_ty(&self) -> ty::Binder<Ty<'tcx>> {
self.map_bound_ref(|sig| sig.yield_ty)
}
pub fn return_ty(&self) -> ty::Binder<Ty<'tcx>> {
self.map_bound_ref(|sig| sig.return_ty)
}
}
/// Signature of a function type, which I have arbitrarily
/// decided to use to refer to the input/output types.
///
/// - `inputs` is the list of arguments and their modes.
/// - `output` is the return type.
/// - `variadic` indicates whether this is a variadic function. (only true for foreign fns)
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
pub struct FnSig<'tcx> {
pub inputs_and_output: &'tcx List<Ty<'tcx>>,
pub variadic: bool,
pub unsafety: hir::Unsafety,
pub abi: abi::Abi,
}
impl<'tcx> FnSig<'tcx> {
pub fn inputs(&self) -> &'tcx [Ty<'tcx>] {
&self.inputs_and_output[..self.inputs_and_output.len() - 1]
}
pub fn output(&self) -> Ty<'tcx> {
self.inputs_and_output[self.inputs_and_output.len() - 1]
}
}
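// Illustrative example (not from the original source): for a signature like
// `fn(i32, bool) -> u8`, `inputs_and_output` stores `[i32, bool, u8]`, so
// `inputs()` yields `[i32, bool]` and `output()` yields `u8`.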
pub type PolyFnSig<'tcx> = Binder<FnSig<'tcx>>;
impl<'tcx> PolyFnSig<'tcx> {
#[inline]
pub fn inputs(&self) -> Binder<&'tcx [Ty<'tcx>]> {
self.map_bound_ref(|fn_sig| fn_sig.inputs())
}
#[inline]
pub fn input(&self, index: usize) -> ty::Binder<Ty<'tcx>> {
self.map_bound_ref(|fn_sig| fn_sig.inputs()[index])
}
pub fn inputs_and_output(&self) -> ty::Binder<&'tcx List<Ty<'tcx>>> {
self.map_bound_ref(|fn_sig| fn_sig.inputs_and_output)
}
#[inline]
pub fn output(&self) -> ty::Binder<Ty<'tcx>> {
self.map_bound_ref(|fn_sig| fn_sig.output())
}
pub fn variadic(&self) -> bool {
self.skip_binder().variadic
}
pub fn unsafety(&self) -> hir::Unsafety {
self.skip_binder().unsafety
}
pub fn abi(&self) -> abi::Abi {
self.skip_binder().abi
}
}
pub type CanonicalPolyFnSig<'tcx> = Canonical<'tcx, Binder<FnSig<'tcx>>>;
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
pub struct ParamTy {
pub idx: u32,
pub name: InternedString,
}
impl<'a, 'gcx, 'tcx> ParamTy {
pub fn new(index: u32, name: InternedString) -> ParamTy {
ParamTy { idx: index, name: name }
}
pub fn for_self() -> ParamTy {
ParamTy::new(0, keywords::SelfUpper.name().as_interned_str())
}
pub fn for_def(def: &ty::GenericParamDef) -> ParamTy {
ParamTy::new(def.index, def.name)
}
pub fn to_ty(self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> {
tcx.mk_ty_param(self.idx, self.name)
}
pub fn is_self(&self) -> bool {
// FIXME(#50125): Ignoring `Self` with `idx != 0` might lead to weird behavior elsewhere,
// but this should only be possible when using `-Z continue-parse-after-error` like
// `compile-fail/issue-36638.rs`.
self.name == keywords::SelfUpper.name().as_str() && self.idx == 0
}
}
/// A [De Bruijn index][dbi] is a standard means of representing
/// regions (and perhaps later types) in a higher-ranked setting. In
/// particular, imagine a type like this:
///
/// for<'a> fn(for<'b> fn(&'b isize, &'a isize), &'a char)
/// ^ ^ | | |
/// | | | | |
/// | +------------+ 0 | |
/// | | |
/// +--------------------------------+ 1 |
/// | |
/// +------------------------------------------+ 0
///
/// In this type, there are two binders (the outer fn and the inner
/// fn). We need to be able to determine, for any given region, which
/// fn type it is bound by, the inner or the outer one. There are
/// various ways you can do this, but a De Bruijn index is one of the
/// more convenient and has some nice properties. The basic idea is to
/// count the number of binders, inside out. Some examples should help
/// clarify what I mean.
///
/// Let's start with the reference type `&'b isize` that is the first
/// argument to the inner function. This region `'b` is assigned a De
/// Bruijn index of 0, meaning "the innermost binder" (in this case, a
/// fn). The region `'a` that appears in the second argument type (`&'a
/// isize`) would then be assigned a De Bruijn index of 1, meaning "the
/// second-innermost binder". (These indices are written on the arrays
/// in the diagram).
///
/// What is interesting is that De Bruijn index attached to a particular
/// variable will vary depending on where it appears. For example,
/// the final type `&'a char` also refers to the region `'a` declared on
/// the outermost fn. But this time, this reference is not nested within
/// any other binders (i.e., it is not an argument to the inner fn, but
/// rather the outer one). Therefore, in this case, it is assigned a
/// De Bruijn index of 0, because the innermost binder in that location
/// is the outer fn.
///
/// [dbi]: http://en.wikipedia.org/wiki/De_Bruijn_index
newtype_index! {
pub struct DebruijnIndex {
DEBUG_FORMAT = "DebruijnIndex({})",
const INNERMOST = 0,
}
}
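// Worked example (editorial sketch, not part of the original source): in the type
// `for<'a> fn(for<'b> fn(&'b isize, &'a isize), &'a char)` discussed above, the inner
// `&'b isize` sits at `INNERMOST` (index 0), the inner `&'a isize` at index 1, and the
// outer `&'a char` back at index 0, because indices count binders from the inside out.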
pub type Region<'tcx> = &'tcx RegionKind;
/// Representation of regions.
///
/// Unlike types, most region variants are "fictitious", not concrete,
/// regions. Among these, `ReStatic`, `ReEmpty` and `ReScope` are the only
/// ones representing concrete regions.
///
/// ## Bound Regions
///
/// These are regions that are stored behind a binder and must be substituted
/// with some concrete region before being used. There are 2 kind of
/// bound regions: early-bound, which are bound in an item's Generics,
/// and are substituted by a Substs, and late-bound, which are part of
/// higher-ranked types (e.g., `for<'a> fn(&'a ())`) and are substituted by
/// the likes of `liberate_late_bound_regions`. The distinction exists
/// because higher-ranked lifetimes aren't supported in all places. See [1][2].
///
/// Unlike Param-s, bound regions are not supposed to exist "in the wild"
/// outside their binder, e.g., in types passed to type inference, and
/// should first be substituted (by placeholder regions, free regions,
/// or region variables).
///
/// ## Placeholder and Free Regions
///
/// One often wants to work with bound regions without knowing their precise
/// identity. For example, when checking a function, the lifetime of a borrow
/// can end up being assigned to some region parameter. In these cases,
/// it must be ensured that bounds on the region can't be accidentally
/// assumed without being checked.
///
/// To do this, we replace the bound regions with placeholder markers,
/// which don't satisfy any relation not explicitly provided.
///
/// There are 2 kinds of placeholder regions in rustc: `ReFree` and
/// `RePlaceholder`. When checking an item's body, `ReFree` is supposed
/// to be used. These also support explicit bounds: both the internally-stored
/// *scope*, which the region is assumed to outlive, as well as other
/// relations stored in the `FreeRegionMap`. Note that these relations
/// aren't checked when you `make_subregion` (or `eq_types`), only by
/// `resolve_regions_and_report_errors`.
///
/// When working with higher-ranked types, some region relations aren't
/// yet known, so you can't just call `resolve_regions_and_report_errors`.
/// `RePlaceholder` is designed for this purpose. In these contexts,
/// there's also the risk that some inference variable lying around will
/// get unified with your placeholder region: if you want to check whether
/// `for<'a> Foo<'_>: 'a`, and you substitute your bound region `'a`
/// with a placeholder region `'%a`, the variable `'_` would just be
/// instantiated to the placeholder region `'%a`, which is wrong because
/// the inference variable is supposed to satisfy the relation
/// *for every value of the placeholder region*. To ensure that doesn't
/// happen, you can use `leak_check`. This is more clearly explained
/// by the [rustc guide].
///
/// [1]: http://smallcultfollowing.com/babysteps/blog/2013/10/29/intermingled-parameter-lists/
/// [2]: http://smallcultfollowing.com/babysteps/blog/2013/11/04/intermingled-parameter-lists/
/// [rustc guide]: https://rust-lang.github.io/rustc-guide/traits/hrtb.html
#[derive(Clone, PartialEq, Eq, Hash, Copy, RustcEncodable, RustcDecodable, PartialOrd, Ord)]
pub enum RegionKind {
// Region bound in a type or fn declaration which will be
// substituted 'early' -- that is, at the same time when type
// parameters are substituted.
ReEarlyBound(EarlyBoundRegion),
// Region bound in a function scope, which will be substituted when the
// function is called.
ReLateBound(DebruijnIndex, BoundRegion),
/// When checking a function body, the types of all arguments and so forth
/// that refer to bound region parameters are modified to refer to free
/// region parameters.
ReFree(FreeRegion),
/// A concrete region naming some statically determined scope
/// (e.g., an expression or sequence of statements) within the
/// current function.
ReScope(region::Scope),
/// Static data that has an "infinite" lifetime. Top in the region lattice.
ReStatic,
/// A region variable. Should not exist after typeck.
ReVar(RegionVid),
/// A placeholder region - basically the higher-ranked version of ReFree.
/// Should not exist after typeck.
RePlaceholder(ty::PlaceholderRegion),
/// Empty lifetime is for data that is never accessed.
/// Bottom in the region lattice. We treat ReEmpty somewhat
/// specially; at least right now, we do not generate instances of
/// it during the GLB computations, but rather
/// generate an error instead. This is to improve error messages.
/// The only way to get an instance of ReEmpty is to have a region
/// variable with no constraints.
ReEmpty,
/// Erased region, used by trait selection, in MIR and during codegen.
ReErased,
/// These are regions bound in the "defining type" for a
/// closure. They are used ONLY as part of the
/// `ClosureRegionRequirements` that are produced by MIR borrowck.
/// See `ClosureRegionRequirements` for more details.
ReClosureBound(RegionVid),
}
impl<'tcx> serialize::UseSpecializedDecodable for Region<'tcx> {}
#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug, PartialOrd, Ord)]
pub struct EarlyBoundRegion {
pub def_id: DefId,
pub index: u32,
pub name: InternedString,
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
pub struct TyVid {
pub index: u32,
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
pub struct IntVid {
pub index: u32,
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
pub struct FloatVid {
pub index: u32,
}
newtype_index! {
pub struct RegionVid {
DEBUG_FORMAT = custom,
}
}
impl Atom for RegionVid {
fn index(self) -> usize {
Idx::index(self)
}
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
pub enum InferTy {
TyVar(TyVid),
IntVar(IntVid),
FloatVar(FloatVid),
/// A `FreshTy` is one that is generated as a replacement for an
/// unbound type variable. This is convenient for caching etc. See
/// `infer::freshen` for more details.
FreshTy(u32),
FreshIntTy(u32),
FreshFloatTy(u32),
}
newtype_index! {
pub struct BoundVar { .. }
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct BoundTy {
pub var: BoundVar,
pub kind: BoundTyKind,
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
pub enum BoundTyKind {
Anon,
Param(InternedString),
}
impl_stable_hash_for!(struct BoundTy { var, kind });
impl_stable_hash_for!(enum self::BoundTyKind { Anon, Param(a) });
impl From<BoundVar> for BoundTy {
fn from(var: BoundVar) -> Self {
BoundTy {
var,
kind: BoundTyKind::Anon,
}
}
}
/// A `ProjectionPredicate` for an `ExistentialTraitRef`.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct ExistentialProjection<'tcx> {
pub item_def_id: DefId,
pub substs: &'tcx Substs<'tcx>,
pub ty: Ty<'tcx>,
}
pub type PolyExistentialProjection<'tcx> = Binder<ExistentialProjection<'tcx>>;
impl<'a, 'tcx, 'gcx> ExistentialProjection<'tcx> {
/// Extracts the underlying existential trait reference from this projection.
/// For example, if this is a projection of `exists T. <T as Iterator>::Item == X`,
/// then this function would return a `exists T. T: Iterator` existential trait
/// reference.
pub fn trait_ref(&self, tcx: TyCtxt<'_, '_, '_>) -> ty::ExistentialTraitRef<'tcx> {
let def_id = tcx.associated_item(self.item_def_id).container.id();
ty::ExistentialTraitRef{
def_id,
substs: self.substs,
}
}
pub fn with_self_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
self_ty: Ty<'tcx>)
-> ty::ProjectionPredicate<'tcx>
{
// otherwise the escaping regions would be captured by the binders
debug_assert!(!self_ty.has_escaping_bound_vars());
ty::ProjectionPredicate {
projection_ty: ty::ProjectionTy {
item_def_id: self.item_def_id,
substs: tcx.mk_substs_trait(self_ty, self.substs),
},
ty: self.ty,
}
}
}
impl<'a, 'tcx, 'gcx> PolyExistentialProjection<'tcx> {
pub fn with_self_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, self_ty: Ty<'tcx>)
-> ty::PolyProjectionPredicate<'tcx> {
self.map_bound(|p| p.with_self_ty(tcx, self_ty))
}
pub fn item_def_id(&self) -> DefId {
return self.skip_binder().item_def_id;
}
}
impl DebruijnIndex {
/// Returns the resulting index when this value is moved into
/// `amount` number of new binders. So e.g., if you had
///
/// for<'a> fn(&'a x)
///
/// and you wanted to change to
///
/// for<'a> fn(for<'b> fn(&'a x))
///
/// you would need to shift the index for `'a` into a new binder.
#[must_use]
pub fn shifted_in(self, amount: u32) -> DebruijnIndex {
DebruijnIndex::from_u32(self.as_u32() + amount)
}
/// Update this index in place by shifting it "in" through
/// `amount` number of binders.
pub fn shift_in(&mut self, amount: u32) {
*self = self.shifted_in(amount);
}
/// Returns the resulting index when this value is moved out from
/// `amount` number of new binders.
#[must_use]
pub fn shifted_out(self, amount: u32) -> DebruijnIndex {
DebruijnIndex::from_u32(self.as_u32() - amount)
}
/// Update in place by shifting out from `amount` binders.
pub fn shift_out(&mut self, amount: u32) {
*self = self.shifted_out(amount);
}
/// Adjusts any Debruijn Indices so as to make `to_binder` the
/// innermost binder. That is, if we have something bound at `to_binder`,
/// it will now be bound at INNERMOST. This is an appropriate thing to do
/// when moving a region out from inside binders:
///
/// ```
/// for<'a> fn(for<'b> for<'c> fn(&'a u32), _)
/// // Binder: D3 D2 D1 ^^
/// ```
///
/// Here, the region `'a` would have the debruijn index D3,
/// because it is the bound 3 binders out. However, if we wanted
/// to refer to that region `'a` in the second argument (the `_`),
/// those two binders would not be in scope. In that case, we
/// might invoke `shift_out_to_binder(D3)`. This would adjust the
/// debruijn index of `'a` to D1 (the innermost binder).
///
/// If we invoke `shift_out_to_binder` and the region is in fact
/// bound by one of the binders we are shifting out of, that is an
/// error (and will trigger an assertion failure).
pub fn shifted_out_to_binder(self, to_binder: DebruijnIndex) -> Self {
self.shifted_out(to_binder.as_u32() - INNERMOST.as_u32())
}
}
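// Illustrative sketch (not part of the original source): the shift operations above are
// plain arithmetic on the underlying index, so moving a bound region under `n` extra
// binders and back out again is a no-op:
//
//     let d = DebruijnIndex::from_u32(1);
//     let shifted = d.shifted_in(2);                  // now under two new binders
//     assert_eq!(shifted.as_u32(), d.as_u32() + 2);
//     assert_eq!(shifted.shifted_out(2).as_u32(), d.as_u32());
//
// `shifted_out_to_binder(to_binder)` performs the same subtraction, with the amount taken
// relative to INNERMOST.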
impl_stable_hash_for!(struct DebruijnIndex { private });
/// Region utilities
impl RegionKind {
/// Is this region named by the user?
pub fn has_name(&self) -> bool {
match *self {
RegionKind::ReEarlyBound(ebr) => ebr.has_name(),
RegionKind::ReLateBound(_, br) => br.is_named(),
RegionKind::ReFree(fr) => fr.bound_region.is_named(),
RegionKind::ReScope(..) => false,
RegionKind::ReStatic => true,
RegionKind::ReVar(..) => false,
RegionKind::RePlaceholder(placeholder) => placeholder.name.is_named(),
RegionKind::ReEmpty => false,
RegionKind::ReErased => false,
RegionKind::ReClosureBound(..) => false,
}
}
pub fn is_late_bound(&self) -> bool {
match *self {
ty::ReLateBound(..) => true,
_ => false,
}
}
pub fn bound_at_or_above_binder(&self, index: DebruijnIndex) -> bool {
match *self {
ty::ReLateBound(debruijn, _) => debruijn >= index,
_ => false,
}
}
/// Adjusts any Debruijn Indices so as to make `to_binder` the
/// innermost binder. That is, if we have something bound at `to_binder`,
/// it will now be bound at INNERMOST. This is an appropriate thing to do
/// when moving a region out from inside binders:
///
/// ```
/// for<'a> fn(for<'b> for<'c> fn(&'a u32), _)
/// // Binder: D3 D2 D1 ^^
/// ```
///
/// Here, the region `'a` would have the debruijn index D3,
/// because it is the bound 3 binders out. However, if we wanted
/// to refer to that region `'a` in the second argument (the `_`),
/// those two binders would not be in scope. In that case, we
/// might invoke `shift_out_to_binder(D3)`. This would adjust the
/// debruijn index of `'a` to D1 (the innermost binder).
///
/// If we invoke `shift_out_to_binder` and the region is in fact
/// bound by one of the binders we are shifting out of, that is an
/// error (and will trigger an assertion failure).
pub fn shifted_out_to_binder(&self, to_binder: ty::DebruijnIndex) -> RegionKind {
match *self {
ty::ReLateBound(debruijn, r) => ty::ReLateBound(
debruijn.shifted_out_to_binder(to_binder),
r,
),
r => r
}
}
pub fn keep_in_local_tcx(&self) -> bool {
if let ty::ReVar(..) = self {
true
} else {
false
}
}
pub fn type_flags(&self) -> TypeFlags {
let mut flags = TypeFlags::empty();
if self.keep_in_local_tcx() {
flags = flags | TypeFlags::KEEP_IN_LOCAL_TCX;
}
match *self {
ty::ReVar(..) => {
flags = flags | TypeFlags::HAS_FREE_REGIONS;
flags = flags | TypeFlags::HAS_RE_INFER;
}
ty::RePlaceholder(..) => {
flags = flags | TypeFlags::HAS_FREE_REGIONS;
flags = flags | TypeFlags::HAS_RE_PLACEHOLDER;
}
ty::ReLateBound(..) => {
flags = flags | TypeFlags::HAS_RE_LATE_BOUND;
}
ty::ReEarlyBound(..) => {
flags = flags | TypeFlags::HAS_FREE_REGIONS;
flags = flags | TypeFlags::HAS_RE_EARLY_BOUND;
}
ty::ReEmpty |
ty::ReStatic |
ty::ReFree { .. } |
ty::ReScope { .. } => {
flags = flags | TypeFlags::HAS_FREE_REGIONS;
}
ty::ReErased => {
}
ty::ReClosureBound(..) => {
flags = flags | TypeFlags::HAS_FREE_REGIONS;
}
}
match *self {
ty::ReStatic | ty::ReEmpty | ty::ReErased | ty::ReLateBound(..) => (),
_ => flags = flags | TypeFlags::HAS_FREE_LOCAL_NAMES,
}
debug!("type_flags({:?}) = {:?}", self, flags);
flags
}
/// Given an early-bound or free region, returns the def-id where it was bound.
/// For example, consider the regions in this snippet of code:
///
/// ```
/// impl<'a> Foo {
/// ^^ -- early bound, declared on an impl
///
/// fn bar<'b, 'c>(x: &self, y: &'b u32, z: &'c u64) where 'static: 'c
/// ^^ ^^ ^ anonymous, late-bound
/// | early-bound, appears in where-clauses
/// late-bound, appears only in fn args
/// {..}
/// }
/// ```
///
/// Here, `free_region_binding_scope('a)` would return the def-id
/// of the impl, and for all the other highlighted regions, it
/// would return the def-id of the function. In other cases (not shown), this
/// function might return the def-id of a closure.
pub fn free_region_binding_scope(&self, tcx: TyCtxt<'_, '_, '_>) -> DefId {
match self {
ty::ReEarlyBound(br) => {
tcx.parent_def_id(br.def_id).unwrap()
}
ty::ReFree(fr) => fr.scope,
_ => bug!("free_region_binding_scope invoked on inappropriate region: {:?}", self),
}
}
}
/// Type utilities
impl<'a, 'gcx, 'tcx> TyS<'tcx> {
pub fn is_unit(&self) -> bool {
match self.sty {
Tuple(ref tys) => tys.is_empty(),
_ => false,
}
}
pub fn is_never(&self) -> bool {
match self.sty {
Never => true,
_ => false,
}
}
pub fn is_primitive(&self) -> bool {
match self.sty {
Bool | Char | Int(_) | Uint(_) | Float(_) => true,
_ => false,
}
}
#[inline]
pub fn is_ty_var(&self) -> bool {
match self.sty {
Infer(TyVar(_)) => true,
_ => false,
}
}
pub fn is_ty_infer(&self) -> bool {
match self.sty {
Infer(_) => true,
_ => false,
}
}
pub fn is_phantom_data(&self) -> bool {
if let Adt(def, _) = self.sty {
def.is_phantom_data()
} else {
false
}
}
pub fn is_bool(&self) -> bool { self.sty == Bool }
pub fn is_param(&self, index: u32) -> bool {
match self.sty {
ty::Param(ref data) => data.idx == index,
_ => false,
}
}
pub fn is_self(&self) -> bool {
match self.sty {
Param(ref p) => p.is_self(),
_ => false,
}
}
pub fn is_slice(&self) -> bool {
match self.sty {
RawPtr(TypeAndMut { ty, .. }) | Ref(_, ty, _) => match ty.sty {
Slice(_) | Str => true,
_ => false,
},
_ => false
}
}
#[inline]
pub fn is_simd(&self) -> bool {
match self.sty {
Adt(def, _) => def.repr.simd(),
_ => false,
}
}
pub fn sequence_element_type(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> {
match self.sty {
Array(ty, _) | Slice(ty) => ty,
Str => tcx.mk_mach_uint(ast::UintTy::U8),
_ => bug!("sequence_element_type called on non-sequence value: {}", self),
}
}
pub fn simd_type(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> {
match self.sty {
Adt(def, substs) => {
def.non_enum_variant().fields[0].ty(tcx, substs)
}
_ => bug!("simd_type called on invalid type")
}
}
pub fn simd_size(&self, _cx: TyCtxt<'_, '_, '_>) -> usize {
match self.sty {
Adt(def, _) => def.non_enum_variant().fields.len(),
_ => bug!("simd_size called on invalid type")
}
}
pub fn is_region_ptr(&self) -> bool {
match self.sty {
Ref(..) => true,
_ => false,
}
}
pub fn is_mutable_pointer(&self) -> bool {
match self.sty {
RawPtr(TypeAndMut { mutbl: hir::Mutability::MutMutable, .. }) |
Ref(_, _, hir::Mutability::MutMutable) => true,
_ => false
}
}
pub fn is_unsafe_ptr(&self) -> bool {
match self.sty {
RawPtr(_) => return true,
_ => return false,
}
}
/// Returns `true` if this type is an `Arc<T>`.
pub fn is_arc(&self) -> bool {
match self.sty {
Adt(def, _) => def.is_arc(),
_ => false,
}
}
/// Returns `true` if this type is an `Rc<T>`.
pub fn is_rc(&self) -> bool {
match self.sty {
Adt(def, _) => def.is_rc(),
_ => false,
}
}
pub fn is_box(&self) -> bool {
match self.sty {
Adt(def, _) => def.is_box(),
_ => false,
}
}
/// panics if called on any type other than `Box<T>`
pub fn boxed_ty(&self) -> Ty<'tcx> {
match self.sty {
Adt(def, substs) if def.is_box() => substs.type_at(0),
_ => bug!("`boxed_ty` is called on non-box type {:?}", self),
}
}
/// A scalar type is one that denotes an atomic datum, with no sub-components.
/// (A RawPtr is scalar because it represents a non-managed pointer, so its
/// contents are abstract to rustc.)
pub fn is_scalar(&self) -> bool {
match self.sty {
Bool | Char | Int(_) | Float(_) | Uint(_) |
Infer(IntVar(_)) | Infer(FloatVar(_)) |
FnDef(..) | FnPtr(_) | RawPtr(_) => true,
_ => false
}
}
/// Returns true if this type is a floating point type and false otherwise.
pub fn is_floating_point(&self) -> bool {
match self.sty {
Float(_) |
Infer(FloatVar(_)) => true,
_ => false,
}
}
pub fn is_trait(&self) -> bool {
match self.sty {
Dynamic(..) => true,
_ => false,
}
}
pub fn is_enum(&self) -> bool {
match self.sty {
Adt(adt_def, _) => {
adt_def.is_enum()
}
_ => false,
}
}
pub fn is_closure(&self) -> bool {
match self.sty {
Closure(..) => true,
_ => false,
}
}
pub fn is_generator(&self) -> bool {
match self.sty {
Generator(..) => true,
_ => false,
}
}
#[inline]
pub fn is_integral(&self) -> bool {
match self.sty {
Infer(IntVar(_)) | Int(_) | Uint(_) => true,
_ => false
}
}
pub fn is_fresh_ty(&self) -> bool {
match self.sty {
Infer(FreshTy(_)) => true,
_ => false,
}
}
pub fn is_fresh(&self) -> bool {
match self.sty {
Infer(FreshTy(_)) => true,
Infer(FreshIntTy(_)) => true,
Infer(FreshFloatTy(_)) => true,
_ => false,
}
}
pub fn is_char(&self) -> bool {
match self.sty {
Char => true,
_ => false,
}
}
#[inline]
pub fn is_fp(&self) -> bool {
match self.sty {
Infer(FloatVar(_)) | Float(_) => true,
_ => false
}
}
pub fn is_numeric(&self) -> bool {
self.is_integral() || self.is_fp()
}
pub fn is_signed(&self) -> bool {
match self.sty {
Int(_) => true,
_ => false,
}
}
pub fn is_pointer_sized(&self) -> bool {
match self.sty {
Int(ast::IntTy::Isize) | Uint(ast::UintTy::Usize) => true,
_ => false,
}
}
pub fn is_machine(&self) -> bool {
match self.sty {
Int(ast::IntTy::Isize) | Uint(ast::UintTy::Usize) => false,
Int(..) | Uint(..) | Float(..) => true,
_ => false,
}
}
pub fn has_concrete_skeleton(&self) -> bool {
match self.sty {
Param(_) | Infer(_) | Error => false,
_ => true,
}
}
/// Returns the type and mutability of `*ty`.
///
/// The parameter `explicit` indicates if this is an *explicit* dereference.
/// Some types -- notably unsafe ptrs -- can only be dereferenced explicitly.
pub fn builtin_deref(&self, explicit: bool) -> Option<TypeAndMut<'tcx>> {
match self.sty {
Adt(def, _) if def.is_box() => {
Some(TypeAndMut {
ty: self.boxed_ty(),
mutbl: hir::MutImmutable,
})
},
Ref(_, ty, mutbl) => Some(TypeAndMut { ty, mutbl }),
RawPtr(mt) if explicit => Some(mt),
_ => None,
}
}
/// Returns the type of `ty[i]`.
pub fn builtin_index(&self) -> Option<Ty<'tcx>> {
match self.sty {
Array(ty, _) | Slice(ty) => Some(ty),
_ => None,
}
}
pub fn fn_sig(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> PolyFnSig<'tcx> {
match self.sty {
FnDef(def_id, substs) => {
tcx.fn_sig(def_id).subst(tcx, substs)
}
FnPtr(f) => f,
_ => bug!("Ty::fn_sig() called on non-fn type: {:?}", self)
}
}
pub fn is_fn(&self) -> bool {
match self.sty {
FnDef(..) | FnPtr(_) => true,
_ => false,
}
}
pub fn is_impl_trait(&self) -> bool {
match self.sty {
Opaque(..) => true,
_ => false,
}
}
#[inline]
pub fn ty_adt_def(&self) -> Option<&'tcx AdtDef> {
match self.sty {
Adt(adt, _) => Some(adt),
_ => None,
}
}
/// Push onto `out` the regions directly referenced from this type (but not
/// types reachable from this type via `walk_tys`). This ignores late-bound
/// region binders.
pub fn push_regions(&self, out: &mut SmallVec<[ty::Region<'tcx>; 4]>) {
match self.sty {
Ref(region, _, _) => {
out.push(region);
}
Dynamic(ref obj, region) => {
out.push(region);
out.extend(obj.principal().skip_binder().substs.regions());
}
Adt(_, substs) | Opaque(_, substs) => {
out.extend(substs.regions())
}
Closure(_, ClosureSubsts { ref substs }) |
Generator(_, GeneratorSubsts { ref substs }, _) => {
out.extend(substs.regions())
}
Projection(ref data) | UnnormalizedProjection(ref data) => {
out.extend(data.substs.regions())
}
FnDef(..) |
FnPtr(_) |
GeneratorWitness(..) |
Bool |
Char |
Int(_) |
Uint(_) |
Float(_) |
Str |
Array(..) |
Slice(_) |
RawPtr(_) |
Never |
Tuple(..) |
Foreign(..) |
Param(_) |
Bound(..) |
Placeholder(..) |
Infer(_) |
Error => {}
}
}
/// When we create a closure, we record its kind (i.e., what trait
/// it implements) into its `ClosureSubsts` using a type
/// parameter. This is kind of a phantom type, except that the
/// most convenient thing for us to use are the integral types. This
/// function converts such a special type into the closure
/// kind. To go the other way, use
/// `tcx.closure_kind_ty(closure_kind)`.
///
/// Note that during type checking, we use an inference variable
/// to represent the closure kind, because it has not yet been
/// inferred. Once upvar inference (in `src/librustc_typeck/check/upvar.rs`)
/// is complete, that type variable will be unified.
pub fn to_opt_closure_kind(&self) -> Option<ty::ClosureKind> {
match self.sty {
Int(int_ty) => match int_ty {
ast::IntTy::I8 => Some(ty::ClosureKind::Fn),
ast::IntTy::I16 => Some(ty::ClosureKind::FnMut),
ast::IntTy::I32 => Some(ty::ClosureKind::FnOnce),
_ => bug!("cannot convert type `{:?}` to a closure kind", self),
},
Infer(_) => None,
Error => Some(ty::ClosureKind::Fn),
_ => bug!("cannot convert type `{:?}` to a closure kind", self),
}
}
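// Illustrative sketch (not part of the original source): the encoding round-trips through
// the integral types matched above -- `tcx.closure_kind_ty(ty::ClosureKind::FnMut)` yields
// the `i16` type, and calling `to_opt_closure_kind()` on `i16` maps back to
// `Some(ClosureKind::FnMut)`, while an inference variable yields `None` because the kind
// has not been inferred yet.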
/// Fast path helper for testing if a type is `Sized`.
///
/// Returning true means the type is known to be sized. Returning
/// `false` means nothing -- could be sized, might not be.
pub fn is_trivially_sized(&self, tcx: TyCtxt<'_, '_, 'tcx>) -> bool {
match self.sty {
ty::Infer(ty::IntVar(_)) | ty::Infer(ty::FloatVar(_)) |
ty::Uint(_) | ty::Int(_) | ty::Bool | ty::Float(_) |
ty::FnDef(..) | ty::FnPtr(_) | ty::RawPtr(..) |
ty::Char | ty::Ref(..) | ty::Generator(..) |
ty::GeneratorWitness(..) | ty::Array(..) | ty::Closure(..) |
ty::Never | ty::Error =>
true,
ty::Str | ty::Slice(_) | ty::Dynamic(..) | ty::Foreign(..) =>
false,
ty::Tuple(tys) =>
tys.iter().all(|ty| ty.is_trivially_sized(tcx)),
ty::Adt(def, _substs) =>
def.sized_constraint(tcx).is_empty(),
ty::Projection(_) | ty::Param(_) | ty::Opaque(..) => false,
ty::UnnormalizedProjection(..) => bug!("only used with chalk-engine"),
ty::Infer(ty::TyVar(_)) => false,
ty::Bound(..) |
ty::Placeholder(..) |
ty::Infer(ty::FreshTy(_)) |
ty::Infer(ty::FreshIntTy(_)) |
ty::Infer(ty::FreshFloatTy(_)) =>
bug!("is_trivially_sized applied to unexpected type: {:?}", self),
}
}
}
/// Typed constant value.
#[derive(Copy, Clone, Debug, Hash, RustcEncodable, RustcDecodable, Eq, PartialEq, Ord, PartialOrd)]
pub struct Const<'tcx> {
pub ty: Ty<'tcx>,
pub val: ConstValue<'tcx>,
}
impl<'tcx> Const<'tcx> {
pub fn unevaluated(
tcx: TyCtxt<'_, '_, 'tcx>,
def_id: DefId,
substs: &'tcx Substs<'tcx>,
ty: Ty<'tcx>,
) -> &'tcx Self {
tcx.mk_const(Const {
val: ConstValue::Unevaluated(def_id, substs),
ty,
})
}
#[inline]
pub fn from_const_value(
tcx: TyCtxt<'_, '_, 'tcx>,
val: ConstValue<'tcx>,
ty: Ty<'tcx>,
) -> &'tcx Self {
tcx.mk_const(Const {
val,
ty,
})
}
#[inline]
pub fn from_scalar(
tcx: TyCtxt<'_, '_, 'tcx>,
val: Scalar,
ty: Ty<'tcx>,
) -> &'tcx Self {
Self::from_const_value(tcx, ConstValue::Scalar(val), ty)
}
#[inline]
pub fn from_bits(
tcx: TyCtxt<'_, '_, 'tcx>,
bits: u128,
ty: ParamEnvAnd<'tcx, Ty<'tcx>>,
) -> &'tcx Self {
let ty = tcx.lift_to_global(&ty).unwrap();
let size = tcx.layout_of(ty).unwrap_or_else(|e| {
panic!("could not compute layout for {:?}: {:?}", ty, e)
}).size;
let shift = 128 - size.bits();
let truncated = (bits << shift) >> shift;
assert_eq!(truncated, bits, "from_bits called with untruncated value");
Self::from_scalar(tcx, Scalar::Bits { bits, size: size.bytes() as u8 }, ty.value)
}
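// Illustrative note (not part of the original source): the shift pair above masks `bits`
// down to the layout's width, so the assertion rejects callers that pass an untruncated
// value. E.g. for an 8-bit layout `size.bits()` is 8 and `shift` is 120: `bits = 44`
// survives the mask unchanged, while `bits = 300` masks down to 44 and trips
// "from_bits called with untruncated value".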
#[inline]
pub fn zero_sized(tcx: TyCtxt<'_, '_, 'tcx>, ty: Ty<'tcx>) -> &'tcx Self {
Self::from_scalar(tcx, Scalar::Bits { bits: 0, size: 0 }, ty)
}
#[inline]
pub fn from_bool(tcx: TyCtxt<'_, '_, 'tcx>, v: bool) -> &'tcx Self {
Self::from_bits(tcx, v as u128, ParamEnv::empty().and(tcx.types.bool))
}
#[inline]
pub fn from_usize(tcx: TyCtxt<'_, '_, 'tcx>, n: u64) -> &'tcx Self {
Self::from_bits(tcx, n as u128, ParamEnv::empty().and(tcx.types.usize))
}
#[inline]
pub fn to_bits(
&self,
tcx: TyCtxt<'_, '_, 'tcx>,
ty: ParamEnvAnd<'tcx, Ty<'tcx>>,
) -> Option<u128> {
if self.ty != ty.value {
return None;
}
let ty = tcx.lift_to_global(&ty).unwrap();
let size = tcx.layout_of(ty).ok()?.size;
self.val.try_to_bits(size)
}
#[inline]
pub fn to_ptr(&self) -> Option<Pointer> {
self.val.try_to_ptr()
}
#[inline]
pub fn assert_bits(
&self,
tcx: TyCtxt<'_, '_, '_>,
ty: ParamEnvAnd<'tcx, Ty<'tcx>>,
) -> Option<u128> {
assert_eq!(self.ty, ty.value);
let ty = tcx.lift_to_global(&ty).unwrap();
let size = tcx.layout_of(ty).ok()?.size;
self.val.try_to_bits(size)
}
#[inline]
pub fn assert_bool(&self, tcx: TyCtxt<'_, '_, '_>) -> Option<bool> {
self.assert_bits(tcx, ParamEnv::empty().and(tcx.types.bool)).and_then(|v| match v {
0 => Some(false),
1 => Some(true),
_ => None,
})
}
#[inline]
pub fn assert_usize(&self, tcx: TyCtxt<'_, '_, '_>) -> Option<u64> {
self.assert_bits(tcx, ParamEnv::empty().and(tcx.types.usize)).map(|v| v as u64)
}
#[inline]
pub fn unwrap_bits(
&self,
tcx: TyCtxt<'_, '_, '_>,
ty: ParamEnvAnd<'tcx, Ty<'tcx>>,
) -> u128 {
self.assert_bits(tcx, ty).unwrap_or_else(||
bug!("expected bits of {}, got {:#?}", ty.value, self))
}
#[inline]
pub fn unwrap_usize(&self, tcx: TyCtxt<'_, '_, '_>) -> u64 {
self.assert_usize(tcx).unwrap_or_else(||
bug!("expected constant usize, got {:#?}", self))
}
}
impl<'tcx> serialize::UseSpecializedDecodable for &'tcx Const<'tcx> {}
| skip_binder |
error.py | """
PythonAEM error, contains a message and PythonAEM Result object
"""
class Error(RuntimeError):
"""
PythonAEM error, contains a message and PythonAEM Result object
useful for debugging the result and response when an error occurs
"""
def __init__(self, message, result):
"""
Initialise an error.
:param message: error message
:param result: PythonAEM Result
:return: PythonAEM Error instance
"""
super().__init__()
self.message = message | self.result = result |
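# Illustrative usage sketch (not part of the original file). `result` stands for any
# PythonAEM Result object returned by a client call; the `is_success()` check below is a
# hypothetical placeholder for whatever success test the caller uses:
#
#     result = client.some_call(...)
#     if not result.is_success():
#         raise Error('Unexpected AEM response', result)
#
# A caller catching the exception can then inspect both `err.message` and `err.result`.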
|
issue-20055-box-unsized-array.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue #20055: Check that boxed fixed-size arrays are properly
// accounted for (namely, only deallocated if they were actually
// created) when they appear as temporaries in unused arms of a match
// expression.
pub fn foo(box_1: fn () -> Box<[i8; 1]>,
box_2: fn () -> Box<[i8; 20]>,
box_3: fn () -> Box<[i8; 300]>,
box_4: fn () -> Box<[i8; 4000]>,
) {
println!("Hello World 1");
let _: Box<[i8]> = match 3 {
1 => box_1(),
2 => box_2(),
3 => box_3(),
_ => box_4(),
};
println!("Hello World 2");
}
pub fn main() {
fn box_1() -> Box<[i8; 1]> { Box::new( [1i8] ) }
fn | () -> Box<[i8; 20]> { Box::new( [1i8; 20] ) }
fn box_3() -> Box<[i8; 300]> { Box::new( [1i8; 300] ) }
fn box_4() -> Box<[i8; 4000]> { Box::new( [1i8; 4000] ) }
foo(box_1, box_2, box_3, box_4);
}
| box_2 |
new_user.py | """
class NewUser:
def __init__(self, fName, mName, lName, nickName, photo, title, company, address, hTel, mTel, wTel, eMail, homepage, sAddress, sHome, sNotes):
self.fName = fName
self.mName = mName
self.lName = lName
self.nickName = nickName
self.photo = photo
self.title = title
self.company = company | self.mTel = mTel
self.wTel = wTel
self.eMail = eMail
self.homepage = homepage
self.birthday = birthday
self.sAddress = sAddress
self.sHome = sHome
self.sNotes = sNotes
""" | self.address = address
self.hTel = hTel |
mixed.py | # AUTOGENERATED! DO NOT EDIT! File to edit: nbs/022_data.mixed.ipynb (unless otherwise specified).
__all__ = ['MixedDataLoader', 'MixedDataLoaders', 'get_mixed_dls']
# Cell
from ..imports import *
# Cell
# This implementation of a mixed dataloader is based on a great implementation created by Zach Mueller in this fastai thread:
# https://forums.fast.ai/t/combining-tabular-images-in-fastai2-and-should-work-with-almost-any-other-type/73197
from packaging import version
from fastai.data.load import _FakeLoader
from torch.utils.data.dataloader import _MultiProcessingDataLoaderIter, _SingleProcessDataLoaderIter, _DatasetKind
_loaders = (_MultiProcessingDataLoaderIter, _SingleProcessDataLoaderIter)
class MixedDataLoader():
def __init__(self, *loaders, path='.', shuffle=False, device=None, bs=None):
"Accepts any number of `DataLoader` and a device"
self.path = path
device = ifnone(device, default_device())
self.device = device
self.c = None
self.d = None
self.bs = ifnone(bs, min([dl.bs for dl in loaders]))
for i, dl in enumerate(loaders): # ensure all dls have the same bs
if hasattr(dl, 'vars'):
self.vars = dl.vars
if hasattr(dl, 'len'):
self.len = dl.len
if hasattr(dl, 'split_idxs'):
self.split_idxs = dl.split_idxs
dl.bs = self.bs
dl.shuffle_fn = self.shuffle_fn
if self.c is None and hasattr(dl, "c"):
self.c = dl.c
if self.d is None and hasattr(dl, "d"):
self.d = dl.d
if i == 0:
self.dataset = dl.dataset
dl.to(device=device)
self.shuffle = shuffle
if not self.shuffle:
self.rng = np.arange(len(self.dataset)).tolist()
self.loaders = loaders
self.count = 0
self.fake_l = _FakeLoader(self, False, 0, 0, 0) if version.parse(
fastai.__version__) >= version.parse("2.1") else _FakeLoader(self, False, 0, 0)
if sum([len(dl.dataset) for dl in loaders]) > 0:
self._get_idxs() # Do not apply on an empty dataset
def new(self, *args, **kwargs):
loaders = [dl.new(*args, **kwargs) for dl in self.loaders]
return type(self)(*loaders, path=self.path, device=self.device)
# def __len__(self): return len(self.loaders[0])
def __len__(self): return self.loaders[0].__len__()
def _get_vals(self, x):
"Checks for duplicates in batches"
idxs, new_x = [], []
for i, o in enumerate(x):
x[i] = o.cpu().numpy().flatten()
for idx, o in enumerate(x):
if not self._arrayisin(o, new_x):
idxs.append(idx)
new_x.append(o)
return idxs
def _get_idxs(self):
"Get `x` and `y` indices for batches of data"
self.n_inps = [dl.n_inp for dl in self.loaders]
self.x_idxs = self._split_idxs(self.n_inps)
# Identify duplicate targets
dl_dict = dict(zip(range(0, len(self.loaders)), self.n_inps))
outs = L([])
for key, n_inp in dl_dict.items():
b = next(iter(self.loaders[key]))
outs += L(b[n_inp:])
self.y_idxs = self._get_vals(outs)
def __iter__(self):
z = zip(*[_loaders[i.fake_l.num_workers == 0](i.fake_l) for i in self.loaders])
for b in z:
inps = []
outs = []
if self.device is not None:
b = to_device(b, self.device)
for batch, dl in zip(b, self.loaders): | if hasattr(dl, 'input_idxs'): self.input_idxs = dl.input_idxs
batch = dl.after_batch(batch)
inps += batch[:dl.n_inp]
outs += batch[dl.n_inp:]
inps = tuple([tuple(L(inps)[idx]) if isinstance(idx, list) else inps[idx]
for idx in self.x_idxs]) if len(self.x_idxs) > 1 else tuple(L(outs)[self.x_idxs][0])
outs = tuple(L(outs)[self.y_idxs]) if len(self.y_idxs) > 1 else L(outs)[self.y_idxs][0]
yield inps, outs
def one_batch(self):
"Grab one batch of data"
with self.fake_l.no_multiproc():
res = first(self)
if hasattr(self, 'it'):
delattr(self, 'it')
return res
def shuffle_fn(self, idxs):
"Generate the same idxs for all dls in each batch when shuffled"
if self.count == 0:
self.shuffled_idxs = np.random.permutation(idxs)
# sort each batch
for i in range(len(self.shuffled_idxs)//self.bs + 1):
self.shuffled_idxs[i*self.bs:(i+1)*self.bs] = np.sort(self.shuffled_idxs[i*self.bs:(i+1)*self.bs])
self.count += 1
if self.count == len(self.loaders):
self.count = 0
return self.shuffled_idxs
def show_batch(self):
"Show a batch of data"
for dl in self.loaders:
dl.show_batch()
def to(self, device): self.device = device
def _arrayisin(self, arr, arr_list):
"Checks if `arr` is in `arr_list`"
for a in arr_list:
if np.array_equal(arr, a):
return True
return False
def _split_idxs(self, a):
a_cum = np.array(a).cumsum().tolist()
b = np.arange(sum(a)).tolist()
start = 0
b_ = []
for i, idx in enumerate(range(len(a))):
end = a_cum[i]
b_.append(b[start:end] if end - start > 1 else b[start])
start = end
return b_
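# Worked example (not part of the original file): with two loaders where the first yields
# 1 input tensor and the second yields 2, `_split_idxs([1, 2])` returns `[0, [1, 2]]`,
# i.e. input 0 belongs to the first loader and inputs 1-2 are grouped for the second.
# `__iter__` above uses this to regroup the flattened batch into per-loader tuples.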
class MixedDataLoaders(DataLoaders):
pass
# Cell
def get_mixed_dls(*dls, device=None, shuffle_train=None, shuffle_valid=None, **kwargs):
_mixed_train_dls = []
_mixed_valid_dls = []
for dl in dls:
_mixed_train_dls.append(dl.train)
_mixed_valid_dls.append(dl.valid)
if shuffle_train is None: shuffle_train = dl.train.shuffle
if shuffle_valid is None: shuffle_valid = dl.valid.shuffle
if device is None: device = dl.train.device
mixed_train_dl = MixedDataLoader(*_mixed_train_dls, shuffle=shuffle_train, **kwargs)
mixed_valid_dl = MixedDataLoader(*_mixed_valid_dls, shuffle=shuffle_valid, **kwargs)
mixed_dls = MixedDataLoaders(mixed_train_dl, mixed_valid_dl, device=device)
return mixed_dls | if hasattr(dl, 'idxs'): self.idxs = dl.idxs |
name_generator.go | package generator
import (
"crypto/rand"
"fmt"
"math/big"
)
var (
left = [...]string{
"admiring",
"adoring",
"affectionate",
"agitated",
"amazing",
"angry",
"awesome",
"beautiful",
"blissful",
"bold",
"boring",
"brave",
"busy",
"charming",
"clever",
"cool",
"compassionate",
"competent",
"condescending",
"confident",
"cranky",
"crazy",
"dazzling",
"determined",
"distracted",
"dreamy",
"eager",
"ecstatic",
"elastic",
"elated",
"elegant",
"eloquent",
"epic",
"exciting",
"fervent",
"festive",
"flamboyant",
"focused",
"friendly",
"frosty",
"funny",
"gallant",
"gifted",
"goofy",
"gracious",
"great",
"happy",
"hardcore",
"heuristic",
"hopeful",
"hungry",
"infallible",
"inspiring",
"interesting",
"intelligent",
"jolly",
"jovial",
"keen",
"kind",
"laughing",
"loving",
"lucid",
"magical",
"mystifying",
"modest",
"musing",
"naughty",
"nervous",
"nice",
"nifty",
"nostalgic",
"objective",
"optimistic",
"peaceful",
"pedantic",
"pensive",
"practical",
"priceless",
"quirky",
"quizzical",
"recursing",
"relaxed",
"reverent",
"romantic",
"sad",
"serene",
"sharp",
"silly",
"sleepy",
"stoic",
"strange",
"stupefied",
"suspicious",
"sweet",
"tender",
"thirsty",
"trusting",
"unruffled",
"upbeat",
"vibrant",
"vigilant",
"vigorous",
"wizardly",
"wonderful",
"xenodochial",
"youthful",
"zealous",
"zen",
}
right = [...]string{
"karolina",
"rafal",
"krzysztof",
"michal",
"mateusz",
"tomasz",
"marcin",
"damian",
"filip",
"artur",
"karol",
"maciej",
}
)
// GenerateName generates a random name from the list of adjectives and names of some kyma creators
// formatted as "adjective-name". For example 'quizzical_rafal'. If isSuffix is true, a random
// integer between 0 and 10 will be added to the end of the name, e.g `focused_filip3`
func GenerateName(isSuffix bool) (string, error) | {
adjIndex, err := rand.Int(rand.Reader, big.NewInt(int64(len(left))))
if err != nil {
return "", err
}
nameIndex, err := rand.Int(rand.Reader, big.NewInt(int64(len(right))))
if err != nil {
return "", err
}
name := fmt.Sprintf("%s-%s", left[adjIndex.Int64()], right[nameIndex.Int64()])
if isSuffix {
index, err := rand.Int(rand.Reader, big.NewInt(10))
if err != nil {
return "", err
}
name = fmt.Sprintf("%s%d", name, index.Int64())
}
return name, nil
} |
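// Illustrative usage (not part of the original file): callers treat the returned value as
// an opaque, human-readable identifier.
//
//	name, err := GenerateName(true)
//	if err != nil {
//		return err
//	}
//	// name is e.g. "focused-filip3"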
|
__init__.py | # -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import sys
import warnings
from google.cloud.monitoring_dashboard.v1 import types
from google.cloud.monitoring_dashboard.v1.gapic import dashboards_service_client
from google.cloud.monitoring_dashboard.v1.gapic import enums
if sys.version_info[:2] == (2, 7):
message = (
"A future version of this library will drop support for Python 2.7."
"More details about Python 2 support for Google Cloud Client Libraries"
"can be found at https://cloud.google.com/python/docs/python2-sunset/"
)
warnings.warn(message, DeprecationWarning)
class | (dashboards_service_client.DashboardsServiceClient):
__doc__ = dashboards_service_client.DashboardsServiceClient.__doc__
enums = enums
__all__ = ("enums", "types", "DashboardsServiceClient")
| DashboardsServiceClient |
0004_auto_20181224_1636.py | # Generated by Django 2.1.4 on 2018-12-24 16:36
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mainapp', '0003_game_description_html'),
]
| model_name='game',
name='description',
field=models.CharField(blank=True, max_length=1000, null=True),
),
] | operations = [
migrations.AlterField( |
snapshot_fault.py |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def SnapshotFault(vim, *args, **kwargs):
| '''Base type for Snapshot-related errors.'''
obj = vim.client.factory.create('{urn:vim25}SnapshotFault')
# do some validation checking...
if (len(args) + len(kwargs)) < 4:
raise IndexError('Expected at least 4 arguments got: %d' % len(args))
required = [ 'dynamicProperty', 'dynamicType', 'faultCause', 'faultMessage' ]
optional = [ ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj |
|
test_grid_data_source.py | """
Tests of GridDataSource behavior.
"""
import unittest
from numpy import array
from numpy.testing import assert_array_equal
from chaco.api import GridDataSource
from traits.testing.unittest_tools import UnittestTools
class GridDataSourceTestCase(UnittestTools, unittest.TestCase):
def setUp(self):
|
def test_empty(self):
data_source = GridDataSource()
self.assertEqual(data_source.sort_order, ('none', 'none'))
self.assertEqual(data_source.index_dimension, 'image')
self.assertEqual(data_source.value_dimension, 'scalar')
self.assertEqual(data_source.metadata,
{"selections":[], "annotations":[]})
xdata, ydata = data_source.get_data()
assert_array_equal(xdata.get_data(), array([]))
assert_array_equal(ydata.get_data(), array([]))
self.assertEqual(data_source.get_bounds(), ((0,0),(0,0)))
def test_init(self):
test_xd = array([1, 2, 3])
test_yd = array([1.5, 0.5, -0.5, -1.5])
test_sort_order = ('ascending', 'descending')
self.assertEqual(self.data_source.sort_order, test_sort_order)
xd, yd = self.data_source.get_data()
assert_array_equal(xd.get_data(), test_xd)
assert_array_equal(yd.get_data(), test_yd)
self.assertEqual(self.data_source.get_bounds(),
((min(test_xd),min(test_yd)),
(max(test_xd),max(test_yd))))
def test_set_data(self):
test_xd = array([0,2,4])
test_yd = array([0,1,2,3,4,5])
test_sort_order = ('none', 'none')
self.data_source.set_data(xdata=test_xd, ydata=test_yd,
sort_order=('none', 'none'))
self.assertEqual(self.data_source.sort_order, test_sort_order)
xd, yd = self.data_source.get_data()
assert_array_equal(xd.get_data(), test_xd)
assert_array_equal(yd.get_data(), test_yd)
self.assertEqual(self.data_source.get_bounds(),
((min(test_xd),min(test_yd)),
(max(test_xd),max(test_yd))))
def test_metadata(self):
self.assertEqual(self.data_source.metadata,
{'annotations': [], 'selections': []})
def test_metadata_changed(self):
with self.assertTraitChanges(self.data_source, 'metadata_changed', count=1):
self.data_source.metadata = {'new_metadata': True}
def test_metadata_items_changed(self):
with self.assertTraitChanges(self.data_source, 'metadata_changed', count=1):
self.data_source.metadata['new_metadata'] = True
| self.data_source = GridDataSource(
xdata=array([1, 2, 3]),
ydata=array([1.5, 0.5, -0.5, -1.5]),
sort_order=('ascending', 'descending')) |
configure.py | #!/usr/bin/env python3
#
# This file is open source software, licensed to you under the terms
# of the Apache License, Version 2.0 (the "License"). See the NOTICE file
# distributed with this work for additional information regarding copyright
# ownership. You may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import os, os.path, textwrap, argparse, sys, shlex, subprocess, tempfile, re
configure_args = str.join(' ', [shlex.quote(x) for x in sys.argv[1:]])
tempfile.tempdir = "./build/tmp"
srcdir = os.getcwd()
def get_flags():
with open('/proc/cpuinfo') as f:
for line in f:
if line.strip():
if line.rstrip('\n').startswith('flags'):
return re.sub(r'^flags\s+: ', '', line).split()
def add_tristate(arg_parser, name, dest, help):
arg_parser.add_argument('--enable-' + name, dest = dest, action = 'store_true', default = None,
help = 'Enable ' + help)
arg_parser.add_argument('--disable-' + name, dest = dest, action = 'store_false', default = None,
help = 'Disable ' + help)
def apply_tristate(var, test, note, missing):
if (var is None) or var:
if test():
return True
elif var == True:
print(missing)
sys.exit(1)
else:
print(note)
return False
return False
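# Taken together (illustrative summary, not part of the original file):
# add_tristate(arg_parser, name='hwloc', dest='hwloc', help='hwloc support') registers
# --enable-hwloc/--disable-hwloc with a default of None, and
# apply_tristate(args.hwloc, test=have_hwloc, note=..., missing=...) then means:
#   None  -> use the feature only if test() succeeds (printing `note` otherwise),
#   True  -> require the feature (print `missing` and exit if test() fails),
#   False -> disable it unconditionally.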
#
# dpdk_cflags - fetch the DPDK specific CFLAGS
#
# Run a simple makefile that "includes" the DPDK main makefile and prints the
# MACHINE_CFLAGS value
#
def dpdk_cflags (dpdk_target):
ensure_tmp_dir_exists()
with tempfile.NamedTemporaryFile() as sfile:
dpdk_target = os.path.abspath(dpdk_target)
dpdk_target = re.sub(r'\/+$', '', dpdk_target)
dpdk_sdk_path = os.path.dirname(dpdk_target)
dpdk_target_name = os.path.basename(dpdk_target)
dpdk_arch = dpdk_target_name.split('-')[0]
if args.dpdk:
dpdk_sdk_path = 'dpdk'
dpdk_target = os.getcwd() + '/build/dpdk'
dpdk_target_name = 'x86_64-{}-linuxapp-gcc'.format(dpdk_machine)
dpdk_arch = 'x86_64'
sfile.file.write(bytes('include ' + dpdk_sdk_path + '/mk/rte.vars.mk' + "\n", 'utf-8'))
sfile.file.write(bytes('all:' + "\n\t", 'utf-8'))
sfile.file.write(bytes('@echo $(MACHINE_CFLAGS)' + "\n", 'utf-8'))
sfile.file.flush()
dpdk_cflags = subprocess.check_output(['make', '--no-print-directory',
'-f', sfile.name,
'RTE_SDK=' + dpdk_sdk_path,
'RTE_OUTPUT=' + dpdk_target,
'RTE_TARGET=' + dpdk_target_name,
'RTE_SDK_BIN=' + dpdk_target,
'RTE_ARCH=' + dpdk_arch])
dpdk_cflags_str = dpdk_cflags.decode('utf-8')
dpdk_cflags_str = re.sub(r'\n+$', '', dpdk_cflags_str)
dpdk_cflags_final = ''
return dpdk_cflags_str
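# Sketch of the throwaway makefile generated above (not part of the original file); it
# exists only long enough for `make -f` to print the machine-specific flags:
#
#     include <dpdk_sdk_path>/mk/rte.vars.mk
#     all:
#         @echo $(MACHINE_CFLAGS)
#
# so the captured output is the MACHINE_CFLAGS string for the chosen RTE_TARGET.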
def try_compile(compiler, source = '', flags = []):
return try_compile_and_link(compiler, source, flags = flags + ['-c'])
def ensure_tmp_dir_exists():
if not os.path.exists(tempfile.tempdir):
os.makedirs(tempfile.tempdir)
def try_compile_and_link(compiler, source = '', flags = []):
ensure_tmp_dir_exists()
with tempfile.NamedTemporaryFile() as sfile:
ofile = tempfile.mktemp()
try:
sfile.file.write(bytes(source, 'utf-8'))
sfile.file.flush()
# We can't write to /dev/null, since in some cases (-ftest-coverage) gcc will create an auxiliary
# output file based on the name of the output file, and "/dev/null.gcsa" is not a good name
return subprocess.call([compiler, '-x', 'c++', '-o', ofile, sfile.name] + args.user_cflags.split() + flags,
stdout = subprocess.DEVNULL,
stderr = subprocess.DEVNULL) == 0
finally:
if os.path.exists(ofile):
os.unlink(ofile)
def try_compile_and_run(compiler, flags, source, env = {}):
ensure_tmp_dir_exists()
mktemp = tempfile.NamedTemporaryFile
with mktemp() as sfile, mktemp(mode='rb') as xfile:
sfile.file.write(bytes(source, 'utf-8'))
sfile.file.flush()
xfile.file.close()
if subprocess.call([compiler, '-x', 'c++', '-o', xfile.name, sfile.name] + args.user_cflags.split() + flags,
stdout = subprocess.DEVNULL, stderr = subprocess.DEVNULL) != 0:
# The compiler may delete the target on failure, and lead to
# NamedTemporaryFile's destructor throwing an exception.
open(xfile.name, 'a').close()
return False
e = os.environ.copy()
e.update(env)
env = e
return subprocess.call([xfile.name], stdout = subprocess.DEVNULL, stderr = subprocess.DEVNULL, env=env) == 0
def warning_supported(warning, compiler):
# gcc ignores -Wno-x even if it is not supported
adjusted = re.sub('^-Wno-', '-W', warning)
return try_compile(flags = [adjusted, '-Werror'], compiler = compiler)
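# E.g. (illustrative, not part of the original file): warning_supported('-Wno-maybe-uninitialized', 'g++')
# actually probes "-Wmaybe-uninitialized -Werror", because probing the -Wno- spelling
# directly would appear to succeed on gcc even for unknown warnings.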
def debug_flag(compiler):
src_with_auto = textwrap.dedent('''\
template <typename T>
struct x { auto f() {} };
x<int> a;
''')
if try_compile(source = src_with_auto, flags = ['-g', '-std=gnu++1y'], compiler = compiler):
return '-g'
else:
print('Note: debug information disabled; upgrade your compiler')
return ''
def sanitize_vptr_flag(compiler):
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=67258
if (not try_compile(compiler, flags=['-fsanitize=vptr'])
or (try_compile_and_run(compiler, flags=['-fsanitize=undefined', '-fno-sanitize-recover'],
env={'UBSAN_OPTIONS': 'exitcode=1'}, source=textwrap.dedent('''
struct A
{
virtual ~A() {}
};
struct B : virtual A {};
struct C : virtual A {};
struct D : B, virtual C {};
int main()
{
D d;
}
'''))
and False)): # -fsanitize=vptr is broken even when the test above passes
return ''
else:
print('Notice: -fsanitize=vptr is broken, disabling; some debug mode tests are bypassed.')
return '-fno-sanitize=vptr'
def adjust_visibility_flags(compiler):
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80947
flags = ['-fvisibility=hidden', '-std=gnu++1y', '-Werror=attributes']
if not try_compile(compiler, flags=flags, source=textwrap.dedent('''
template <class T>
class MyClass {
public:
MyClass() {
auto outer = [this] ()
{
auto fn = [this] { };
//use fn for something here
};
}
};
int main() {
MyClass<int> r;
}
''')):
print('Notice: disabling -Wattributes due to https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80947')
return '-Wno-attributes'
else:
return ''
modes = {
'debug': {
'sanitize': '-fsanitize=address -fsanitize=leak -fsanitize=undefined',
'sanitize_libs': '-lasan -lubsan',
'opt': '-O0 -DDEBUG -DDEBUG_SHARED_PTR -DDEFAULT_ALLOCATOR -DSEASTAR_THREAD_STACK_GUARDS -DNO_EXCEPTION_HACK',
'libs': '',
'cares_opts': '-DCARES_STATIC=ON -DCARES_SHARED=OFF -DCMAKE_BUILD_TYPE=Debug',
},
'release': {
'sanitize': '',
'sanitize_libs': '',
'opt': '-O2 -DNDEBUG -DDEFAULT_ALLOCATOR',
'libs': '',
'cares_opts': '-DCARES_STATIC=ON -DCARES_SHARED=OFF -DCMAKE_BUILD_TYPE=Release',
},
}
tests = [
'tests/fileiotest',
'tests/directory_test',
'tests/linecount',
'tests/echotest',
'tests/l3_test',
'tests/ip_test',
'tests/timertest',
'tests/tcp_test',
'tests/futures_test',
'tests/alloc_test',
'tests/foreign_ptr_test',
'tests/smp_test',
'tests/thread_test',
'tests/thread_context_switch',
'tests/udp_server',
'tests/udp_client',
'tests/blkdiscard_test',
'tests/sstring_test',
'tests/unwind_test',
'tests/defer_test',
'tests/httpd',
'tests/memcached/test_ascii_parser',
'tests/tcp_sctp_server',
'tests/tcp_sctp_client',
'tests/allocator_test',
'tests/output_stream_test',
'tests/udp_zero_copy',
'tests/shared_ptr_test',
'tests/weak_ptr_test',
'tests/checked_ptr_test',
'tests/slab_test',
'tests/fstream_test',
'tests/distributed_test',
'tests/rpc',
'tests/semaphore_test',
'tests/expiring_fifo_test',
'tests/packet_test',
'tests/tls_test',
'tests/fair_queue_test',
'tests/rpc_test',
'tests/connect_test',
'tests/chunked_fifo_test',
'tests/circular_buffer_test',
'tests/perf/perf_fstream',
'tests/json_formatter_test',
'tests/dns_test',
'tests/execution_stage_test',
'tests/lowres_clock_test',
'tests/program_options_test',
'tests/tuple_utils_test',
'tests/tls_echo_server',
'tests/tls_simple_client',
'tests/circular_buffer_fixed_capacity_test',
'tests/noncopyable_function_test',
]
apps = [
'apps/httpd/httpd',
'apps/seawreck/seawreck',
'apps/fair_queue_tester/fair_queue_tester',
'apps/memcached/memcached',
'apps/iotune/iotune',
'tests/scheduling_group_demo',
]
all_artifacts = apps + tests + ['libseastar.a', 'seastar.pc']
arg_parser = argparse.ArgumentParser('Configure seastar')
arg_parser.add_argument('--static', dest = 'static', action = 'store_const', default = '',
const = '-static',
help = 'Static link (useful for running on hosts outside the build environment)')
arg_parser.add_argument('--embedded-static', dest = 'embedded_static', action = 'store_const', default = '',
const = '-embedded-static',
help = 'Static link with -fPIC (can be embedded in a shared library)')
arg_parser.add_argument('--pie', dest = 'pie', action = 'store_true',
help = 'Build position-independent executable (PIE)')
arg_parser.add_argument('--so', dest = 'so', action = 'store_true',
help = 'Build shared object (SO) instead of executable')
arg_parser.add_argument('--mode', action='store', choices=list(modes.keys()) + ['all'], default='all')
arg_parser.add_argument('--with', dest='artifacts', action='append', choices=all_artifacts, default=[])
arg_parser.add_argument('--cflags', action = 'store', dest = 'user_cflags', default = '',
help = 'Extra flags for the C++ compiler')
arg_parser.add_argument('--ldflags', action = 'store', dest = 'user_ldflags', default = '',
help = 'Extra flags for the linker')
arg_parser.add_argument('--compiler', action = 'store', dest = 'cxx', default = 'g++',
help = 'C++ compiler path')
arg_parser.add_argument('--c-compiler', action='store', dest='cc', default='gcc',
help = 'C compiler path (for bundled libraries such as dpdk and c-ares)')
arg_parser.add_argument('--with-osv', action = 'store', dest = 'with_osv', default = '',
help = 'Shortcut for compile for OSv')
arg_parser.add_argument('--enable-dpdk', action = 'store_true', dest = 'dpdk', default = False,
help = 'Enable dpdk (from included dpdk sources)')
arg_parser.add_argument('--dpdk-target', action = 'store', dest = 'dpdk_target', default = '',
help = 'Path to DPDK SDK target location (e.g. <DPDK SDK dir>/x86_64-native-linuxapp-gcc)')
arg_parser.add_argument('--debuginfo', action = 'store', dest = 'debuginfo', type = int, default = 1,
help = 'Enable(1)/disable(0)compiler debug information generation')
arg_parser.add_argument('--tests-debuginfo', action='store', dest='tests_debuginfo', type=int, default=0,
help='Enable(1)/disable(0)compiler debug information generation for tests')
arg_parser.add_argument('--static-stdc++', dest = 'staticcxx', action = 'store_true',
help = 'Link libgcc and libstdc++ statically')
arg_parser.add_argument('--static-boost', dest = 'staticboost', action = 'store_true',
help = 'Link with boost statically')
add_tristate(arg_parser, name = 'hwloc', dest = 'hwloc', help = 'hwloc support')
arg_parser.add_argument('--enable-gcc6-concepts', dest='gcc6_concepts', action='store_true', default=False,
help='enable experimental support for C++ Concepts as implemented in GCC 6')
add_tristate(arg_parser, name = 'exception-scalability-workaround', dest='exception_workaround',
help='disabling override of dl_iterate_phdr symbol to workaround C++ exception scalability issues')
arg_parser.add_argument('--allocator-page-size', dest='allocator_page_size', type=int, help='override allocator page size')
args = arg_parser.parse_args()
libnet = [
'net/proxy.cc',
'net/virtio.cc',
'net/dpdk.cc',
'net/ip.cc',
'net/ethernet.cc',
'net/arp.cc',
'net/native-stack.cc',
'net/ip_checksum.cc',
'net/udp.cc',
'net/tcp.cc',
'net/dhcp.cc',
'net/tls.cc',
'net/dns.cc',
]
core = [
'core/reactor.cc',
'core/systemwide_memory_barrier.cc',
'core/fstream.cc',
'core/posix.cc',
'core/memory.cc',
'core/resource.cc',
'core/scollectd.cc',
'core/metrics.cc',
'core/app-template.cc',
'core/thread.cc',
'core/dpdk_rte.cc',
'core/fsqual.cc',
'util/conversions.cc',
'util/program-options.cc',
'util/log.cc',
'util/backtrace.cc',
'net/packet.cc',
'net/posix-stack.cc',
'net/net.cc',
'net/stack.cc',
'net/inet_address.cc',
'rpc/rpc.cc',
'rpc/lz4_compressor.cc',
'core/exception_hacks.cc',
]
protobuf = [
'proto/metrics2.proto',
]
#prometheus = [
# 'core/prometheus.cc',
# ]
http = ['http/transformers.cc',
'http/json_path.cc',
'http/file_handler.cc',
'http/common.cc',
'http/routes.cc',
'json/json_elements.cc',
'json/formatter.cc',
'http/matcher.cc',
'http/mime_types.cc',
'http/httpd.cc',
'http/reply.cc',
'http/request_parser.rl',
'http/api_docs.cc',
]
boost_test_lib = [
'tests/test-utils.cc',
'tests/test_runner.cc',
]
def maybe_static(flag, libs):
if flag and not args.static:
libs = '-Wl,-Bstatic {} -Wl,-Bdynamic'.format(libs)
return libs
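# Worked example (not part of the original file): with --static-boost and without --static,
# maybe_static(args.staticboost, '-lboost_system') returns
# '-Wl,-Bstatic -lboost_system -Wl,-Bdynamic', so only the Boost libraries are linked
# statically; otherwise the flags are passed through unchanged.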
defines = ['FMT_HEADER_ONLY']
# Include -lgcc_s before -lunwind to work around for https://savannah.nongnu.org/bugs/?48486. See https://github.com/scylladb/scylla/issues/1725.
libs = ' '.join(['-laio',
maybe_static(args.staticboost,
'-lboost_program_options -lboost_system -lboost_filesystem'),
'-lstdc++ -lm',
maybe_static(args.staticboost, '-lboost_thread'),
'-lcryptopp -lrt -lgnutls -lgnutlsxx -llz4 -lprotobuf -ldl -lgcc_s -lunwind',
])
boost_unit_test_lib = maybe_static(args.staticboost, '-lboost_unit_test_framework')
hwloc_libs = '-lhwloc -lnuma -lpciaccess -lxml2 -lz'
if args.gcc6_concepts:
defines.append('HAVE_GCC6_CONCEPTS')
args.user_cflags += ' -fconcepts'
if not apply_tristate(args.exception_workaround, test = lambda: not args.staticcxx and not args.static,
note = "Note: disabling exception scalability workaround due to static linkage of libgcc and libstdc++",
missing = "Error: cannot enable exception scalability workaround with static linkage of libgcc and libstdc++"):
defines.append('NO_EXCEPTION_HACK')
if args.staticcxx:
libs = libs.replace('-lstdc++', '')
libs += ' -static-libgcc -static-libstdc++'
if args.staticcxx or args.static:
defines.append("NO_EXCEPTION_INTERCEPT");
memcache_base = [
'apps/memcached/ascii.rl'
] + libnet + core
deps = {
'libseastar.a' : core + libnet + http, # + protobuf + prometheus,
'seastar.pc': [],
'apps/httpd/httpd': ['apps/httpd/demo.json', 'apps/httpd/main.cc'] + http + libnet + core,
'apps/memcached/memcached': ['apps/memcached/memcache.cc'] + memcache_base,
'tests/memcached/test_ascii_parser': ['tests/memcached/test_ascii_parser.cc'] + memcache_base,
'tests/fileiotest': ['tests/fileiotest.cc'] + core,
'tests/directory_test': ['tests/directory_test.cc'] + core,
'tests/linecount': ['tests/linecount.cc'] + core,
'tests/echotest': ['tests/echotest.cc'] + core + libnet,
'tests/l3_test': ['tests/l3_test.cc'] + core + libnet,
'tests/ip_test': ['tests/ip_test.cc'] + core + libnet,
'tests/tcp_test': ['tests/tcp_test.cc'] + core + libnet,
'tests/timertest': ['tests/timertest.cc'] + core,
'tests/futures_test': ['tests/futures_test.cc'] + core,
'tests/alloc_test': ['tests/alloc_test.cc'] + core,
'tests/foreign_ptr_test': ['tests/foreign_ptr_test.cc'] + core,
'tests/semaphore_test': ['tests/semaphore_test.cc'] + core,
'tests/expiring_fifo_test': ['tests/expiring_fifo_test.cc'] + core,
'tests/smp_test': ['tests/smp_test.cc'] + core,
'tests/thread_test': ['tests/thread_test.cc'] + core,
'tests/thread_context_switch': ['tests/thread_context_switch.cc'] + core,
'tests/udp_server': ['tests/udp_server.cc'] + core + libnet,
'tests/udp_client': ['tests/udp_client.cc'] + core + libnet,
'tests/tcp_sctp_server': ['tests/tcp_sctp_server.cc'] + core + libnet,
'tests/tcp_sctp_client': ['tests/tcp_sctp_client.cc'] + core + libnet,
'tests/tls_test': ['tests/tls_test.cc'] + core + libnet,
'tests/fair_queue_test': ['tests/fair_queue_test.cc'] + core,
'apps/seawreck/seawreck': ['apps/seawreck/seawreck.cc', 'http/http_response_parser.rl'] + core + libnet,
'apps/fair_queue_tester/fair_queue_tester': ['apps/fair_queue_tester/fair_queue_tester.cc'] + core,
'apps/iotune/iotune': ['apps/iotune/iotune.cc'] + ['core/resource.cc', 'core/fsqual.cc'],
'tests/blkdiscard_test': ['tests/blkdiscard_test.cc'] + core,
'tests/sstring_test': ['tests/sstring_test.cc'] + core,
'tests/unwind_test': ['tests/unwind_test.cc'] + core,
'tests/defer_test': ['tests/defer_test.cc'] + core,
'tests/httpd': ['tests/httpd.cc'] + http + core,
'tests/allocator_test': ['tests/allocator_test.cc'] + core,
'tests/output_stream_test': ['tests/output_stream_test.cc'] + core + libnet,
'tests/udp_zero_copy': ['tests/udp_zero_copy.cc'] + core + libnet,
'tests/shared_ptr_test': ['tests/shared_ptr_test.cc'] + core,
'tests/weak_ptr_test': ['tests/weak_ptr_test.cc'] + core,
'tests/checked_ptr_test': ['tests/checked_ptr_test.cc'] + core,
'tests/slab_test': ['tests/slab_test.cc'] + core,
'tests/fstream_test': ['tests/fstream_test.cc'] + core,
'tests/distributed_test': ['tests/distributed_test.cc'] + core,
'tests/rpc': ['tests/rpc.cc'] + core + libnet,
'tests/rpc_test': ['tests/rpc_test.cc'] + core + libnet,
'tests/packet_test': ['tests/packet_test.cc'] + core + libnet,
'tests/connect_test': ['tests/connect_test.cc'] + core + libnet,
'tests/chunked_fifo_test': ['tests/chunked_fifo_test.cc'] + core,
'tests/circular_buffer_test': ['tests/circular_buffer_test.cc'] + core,
'tests/perf/perf_fstream': ['tests/perf/perf_fstream.cc'] + core,
'tests/json_formatter_test': ['tests/json_formatter_test.cc'] + core + http,
'tests/dns_test': ['tests/dns_test.cc'] + core + libnet,
'tests/execution_stage_test': ['tests/execution_stage_test.cc'] + core,
'tests/lowres_clock_test': ['tests/lowres_clock_test.cc'] + core,
'tests/program_options_test': ['tests/program_options_test.cc'] + core,
'tests/tuple_utils_test': ['tests/tuple_utils_test.cc'],
'tests/tls_echo_server': ['tests/tls_echo_server.cc'] + core + libnet,
'tests/tls_simple_client': ['tests/tls_simple_client.cc'] + core + libnet,
'tests/circular_buffer_fixed_capacity_test': ['tests/circular_buffer_fixed_capacity_test.cc'],
'tests/scheduling_group_demo': ['tests/scheduling_group_demo.cc'] + core,
'tests/noncopyable_function_test': ['tests/noncopyable_function_test.cc'],
}
boost_tests = [ | 'tests/memcached/test_ascii_parser',
'tests/fileiotest',
'tests/futures_test',
'tests/alloc_test',
'tests/foreign_ptr_test',
'tests/semaphore_test',
'tests/expiring_fifo_test',
'tests/thread_test',
'tests/tls_test',
'tests/fair_queue_test',
'tests/httpd',
'tests/output_stream_test',
'tests/fstream_test',
'tests/rpc_test',
'tests/connect_test',
'tests/json_formatter_test',
'tests/dns_test',
'tests/execution_stage_test',
'tests/lowres_clock_test',
]
for bt in boost_tests:
deps[bt] += boost_test_lib
warnings = [
'-Wno-mismatched-tags', # clang-only
'-Wno-pessimizing-move', # clang-only: moving a temporary object prevents copy elision
'-Wno-redundant-move', # clang-only: redundant move in return statement
'-Wno-inconsistent-missing-override', # clang-only: 'x' overrides a member function but is not marked 'override'
'-Wno-unused-private-field', # clang-only: private field 'x' is not used
'-Wno-unknown-attributes', # clang-only: unknown attribute 'x' ignored (x in this case is gnu::externally_visible)
'-Wno-unneeded-internal-declaration', # clang-only: 'x' function 'x' declared in header file should be declared 'x'
'-Wno-undefined-inline', # clang-only: inline function 'x' is not defined
'-Wno-overloaded-virtual', # clang-only: 'x' hides overloaded virtual functions
'-Wno-maybe-uninitialized',
'-Wno-sign-compare',
]
# The "--with-osv=<path>" parameter is a shortcut for a bunch of other
# settings:
if args.with_osv:
args.so = True
args.hwloc = False
args.user_cflags = (args.user_cflags +
' -DDEFAULT_ALLOCATOR -fvisibility=default -DHAVE_OSV -I' +
args.with_osv + ' -I' + args.with_osv + '/include -I' +
args.with_osv + '/arch/x64')
if args.allocator_page_size:
args.user_cflags += ' -DSEASTAR_OVERRIDE_ALLOCATOR_PAGE_SIZE=' + str(args.allocator_page_size)
dpdk_arch_xlat = {
'native': 'native',
'nehalem': 'nhm',
'westmere': 'wsm',
'sandybridge': 'snb',
'ivybridge': 'ivb',
}
dpdk_machine = 'native'
if args.dpdk:
if not os.path.exists('dpdk') or not os.listdir('dpdk'):
raise Exception('--enable-dpdk: dpdk/ is empty. Run "git submodule update --init".')
cflags = args.user_cflags.split()
dpdk_machine = ([dpdk_arch_xlat[cflag[7:]]
for cflag in cflags
if cflag.startswith('-march')] or ['native'])[0]
subprocess.check_call('make -C dpdk RTE_OUTPUT=$PWD/build/dpdk/ config T=x86_64-native-linuxapp-gcc'.format(
dpdk_machine=dpdk_machine),
shell = True)
# adjust configuration to taste
dotconfig = 'build/dpdk/.config'
lines = open(dotconfig, encoding='UTF-8').readlines()
def update(lines, vars):
ret = []
for line in lines:
for var, val in vars.items():
if line.startswith(var + '='):
line = var + '=' + val + '\n'
ret.append(line)
return ret
lines = update(lines, {'CONFIG_RTE_LIBRTE_PMD_BOND': 'n',
'CONFIG_RTE_MBUF_SCATTER_GATHER': 'n',
'CONFIG_RTE_LIBRTE_IP_FRAG': 'n',
'CONFIG_RTE_APP_TEST': 'n',
'CONFIG_RTE_TEST_PMD': 'n',
'CONFIG_RTE_MBUF_REFCNT_ATOMIC': 'n',
'CONFIG_RTE_MAX_MEMSEG': '8192',
'CONFIG_RTE_EAL_IGB_UIO': 'n',
'CONFIG_RTE_LIBRTE_KNI': 'n',
'CONFIG_RTE_KNI_KMOD': 'n',
'CONFIG_RTE_LIBRTE_JOBSTATS': 'n',
'CONFIG_RTE_LIBRTE_LPM': 'n',
'CONFIG_RTE_LIBRTE_ACL': 'n',
'CONFIG_RTE_LIBRTE_POWER': 'n',
'CONFIG_RTE_LIBRTE_METER': 'n',
'CONFIG_RTE_LIBRTE_SCHED': 'n',
'CONFIG_RTE_LIBRTE_DISTRIBUTOR': 'n',
'CONFIG_RTE_LIBRTE_PMD_CRYPTO_SCHEDULER': 'n',
'CONFIG_RTE_LIBRTE_REORDER': 'n',
'CONFIG_RTE_LIBRTE_PORT': 'n',
'CONFIG_RTE_LIBRTE_TABLE': 'n',
'CONFIG_RTE_LIBRTE_PIPELINE': 'n',
})
lines += ['CONFIG_RTE_MACHINE={}\n'.format(dpdk_machine)]
open(dotconfig, 'w', encoding='UTF-8').writelines(lines)
args.dpdk_target = os.getcwd() + '/build/dpdk'
if args.dpdk_target:
args.user_cflags = (args.user_cflags +
' -DHAVE_DPDK -I' + args.dpdk_target + '/include ' +
dpdk_cflags(args.dpdk_target) +
' -Wno-error=literal-suffix -Wno-literal-suffix -Wno-invalid-offsetof')
libs += (' -L' + args.dpdk_target + '/lib ')
if args.with_osv:
libs += '-lintel_dpdk -lrt -lm -ldl'
else:
libs += '-Wl,--whole-archive -lrte_pmd_vmxnet3_uio -lrte_pmd_i40e -lrte_pmd_ixgbe -lrte_pmd_e1000 -lrte_pmd_ring -lrte_pmd_bnxt -lrte_pmd_cxgbe -lrte_pmd_ena -lrte_pmd_enic -lrte_pmd_fm10k -lrte_pmd_nfp -lrte_pmd_qede -lrte_pmd_sfc_efx -lrte_hash -lrte_kvargs -lrte_mbuf -lrte_ethdev -lrte_eal -lrte_mempool -lrte_mempool_ring -lrte_ring -lrte_cmdline -lrte_cfgfile -Wl,--no-whole-archive -lrt -lm -ldl'
args.user_cflags += ' -I{srcdir}/fmt'.format(**globals())
if not args.staticboost:
args.user_cflags += ' -DBOOST_TEST_DYN_LINK'
warnings = [w
for w in warnings
if warning_supported(warning = w, compiler = args.cxx)]
warnings = ' '.join(warnings)
dbgflag = debug_flag(args.cxx) if args.debuginfo else ''
tests_link_rule = 'link' if args.tests_debuginfo else 'link_stripped'
sanitize_flags = sanitize_vptr_flag(args.cxx)
visibility_flags = adjust_visibility_flags(args.cxx)
visibility_flags = '-fvisibility=hidden ' + visibility_flags
if not try_compile(args.cxx, '#include <gnutls/gnutls.h>'):
print('Seastar requires gnutls. Install gnutls-devel/libgnutls-dev')
sys.exit(1)
if not try_compile(args.cxx, '#include <gnutls/gnutls.h>\nint x = GNUTLS_NONBLOCK;'):
print('Seastar requires gnutls >= 2.8. Install libgnutls28-dev or later.')
sys.exit(1)
if not try_compile(args.cxx, '#include <experimental/string_view>', ['-std=gnu++1y']):
print('Seastar requires g++ >= 4.9. Install g++-4.9 or later (use --compiler option).')
sys.exit(1)
if not try_compile(args.cxx, '''#include <boost/version.hpp>\n\
#if BOOST_VERSION < 105500\n\
#error "Invalid boost version"\n\
#endif'''):
print("Seastar requires boost >= 1.55")
sys.exit(1)
modes['debug']['sanitize'] += ' ' + sanitize_flags
def have_hwloc():
return try_compile(compiler = args.cxx, source = '#include <hwloc.h>\n#include <numa.h>')
if apply_tristate(args.hwloc, test = have_hwloc,
note = 'Note: hwloc-devel/numactl-devel not installed. No NUMA support.',
missing = 'Error: required packages hwloc-devel/numactl-devel not installed.'):
libs += ' ' + hwloc_libs
defines.append('HAVE_HWLOC')
defines.append('HAVE_NUMA')
if try_compile(args.cxx, source = textwrap.dedent('''\
#include <lz4.h>
void m() {
LZ4_compress_default(static_cast<const char*>(0), static_cast<char*>(0), 0, 0);
}
''')):
defines.append("HAVE_LZ4_COMPRESS_DEFAULT")
if try_compile_and_link(args.cxx, flags=['-fsanitize=address'], source = textwrap.dedent('''\
#include <cstddef>
extern "C" {
void __sanitizer_start_switch_fiber(void**, const void*, size_t);
void __sanitizer_finish_switch_fiber(void*, const void**, size_t*);
}
int main() {
__sanitizer_start_switch_fiber(nullptr, nullptr, 0);
__sanitizer_finish_switch_fiber(nullptr, nullptr, nullptr);
}
''')):
defines.append("HAVE_ASAN_FIBER_SUPPORT")
if args.embedded_static:
args.pie = ''
args.fpie = '-fPIC'
visibility_flags = visibility_flags.replace('-fvisibility=hidden ', '')
modes[args.mode]['cares_opts'] += ' -DCMAKE_C_FLAGS=-fPIC'
elif args.so:
args.pie = '-shared'
args.fpie = '-fpic'
elif args.pie:
args.pie = '-pie'
args.fpie = '-fpie'
else:
args.pie = ''
args.fpie = ''
defines = ' '.join(['-D' + d for d in defines])
globals().update(vars(args))
total_memory = os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES')
link_pool_depth = max(int(total_memory / 7e9), 1)
build_modes = modes if args.mode == 'all' else [args.mode]
build_artifacts = all_artifacts if not args.artifacts else args.artifacts
dpdk_sources = []
if args.dpdk:
for root, dirs, files in os.walk('dpdk'):
dpdk_sources += [os.path.join(root, file)
for file in files
if file.endswith('.h') or file.endswith('.c')]
dpdk_sources = ' '.join(dpdk_sources)
# both source and builddir location
cares_dir = 'c-ares'
cares_lib = 'cares-seastar'
cares_src_lib = cares_dir + '/lib/libcares.a'
if not os.path.exists(cares_dir) or not os.listdir(cares_dir):
raise Exception(cares_dir + ' is empty. Run "git submodule update --init".')
cares_sources = []
for root, dirs, files in os.walk('c-ares'):
cares_sources += [os.path.join(root, file)
for file in files
if file.endswith('.h') or file.endswith('.c')]
cares_sources = ' '.join(cares_sources)
libs += ' -l' + cares_lib
# "libs" contains mostly pre-existing libraries, but if we want to add to
# it a library which we built here, we need to ensure that this library
# gets built before actually using "libs". So let's make a list "built_libs"
# of libraries which are targets built here. These libraries are all relative
# to the current mode's build directory.
built_libs = []
built_libs += ['lib' + cares_lib + '.a']
outdir = 'build'
buildfile = 'build.ninja'
os.makedirs(outdir, exist_ok = True)
do_sanitize = True
if args.static:
do_sanitize = False
with open(buildfile, 'w') as f:
dpdk_deps = ''
if args.dpdk:
# fake dependencies on dpdk, so that it is built before anything else
dpdk_deps = ' {dpdk_target}/include/rte_eal.h {dpdk_target}/lib/librte_eal.a'.format(dpdk_target=args.dpdk_target)
f.write(textwrap.dedent('''\
configure_args = {configure_args}
builddir = {outdir}
full_builddir = {srcdir}/$builddir
cxx = {cxx}
# we disable _FORTIFY_SOURCE because it generates false positives with longjmp() (core/thread.cc)
cxxflags = -std=gnu++1y {dbgflag} {fpie} -Wall -Wno-unused-variable -Wno-unused-but-set-variable -Wno-error=deprecated-declarations {visibility_flags} -pthread -I{srcdir} -U_FORTIFY_SOURCE {user_cflags} {warnings} {defines}
ldflags = {dbgflag} -Wl,--no-as-needed {static} {pie} {visibility_flags} -pthread {user_ldflags}
libs = {libs}
pool link_pool
depth = {link_pool_depth}
rule ragel
# sed away a bug in ragel 7 that emits some extraneous _nfa* variables
# (the $$ is collapsed to a single one by ninja)
command = ragel -G2 -o $out $in && sed -i -e '1h;2,$$H;$$!d;g' -re 's/static const char _nfa[^;]*;//g' $out
description = RAGEL $out
rule gen
command = /bin/echo -e $text > $out
description = GEN $out
rule swagger
command = json/json2code.py -f $in -o $out
description = SWAGGER $out
rule protobuf
command = protoc --cpp_out=$outdir $in
description = PROTOC $out
rule copy_file
command = cp $in $out
''').format(**globals()))
if args.dpdk:
f.write(textwrap.dedent('''\
rule dpdkmake
command = make -C build/dpdk CC={args.cc}
build {dpdk_deps} : dpdkmake {dpdk_sources}
''').format(**globals()))
for mode in build_modes:
objdeps = {}
modeval = modes[mode]
if modeval['sanitize'] and not do_sanitize:
print('Note: --static disables debug mode sanitizers')
modeval['sanitize'] = ''
modeval['sanitize_libs'] = ''
elif modeval['sanitize']:
modeval['sanitize'] += ' -DASAN_ENABLED'
f.write(textwrap.dedent('''\
cxxflags_{mode} = {sanitize} {opt} -I$full_builddir/{mode}/gen -I$full_builddir/{mode}/c-ares
libs_{mode} = {sanitize_libs} {libs}
rule cxx.{mode}
command = $cxx -MD -MT $out -MF $out.d $cxxflags_{mode} $cxxflags -c -o $out $in
description = CXX $out
depfile = $out.d
rule link.{mode}
command = $cxx $cxxflags_{mode} -L$builddir/{mode} $ldflags -o $out $in $libs $libs_{mode} $extralibs
description = LINK $out
pool = link_pool
rule link_stripped.{mode}
command = $cxx $cxxflags_{mode} -s -L$builddir/{mode} $ldflags -o $out $in $libs $libs_{mode} $extralibs
description = LINK (stripped) $out
pool = link_pool
rule ar.{mode}
command = rm -f $out; ar cr $out $in; ranlib $out
description = AR $out
''').format(mode = mode, **modeval))
f.write('build {mode}: phony $builddir/{mode}/lib{cares_lib}.a {artifacts}\n'.format(mode = mode, cares_lib=cares_lib,
artifacts = str.join(' ', ('$builddir/' + mode + '/' + x for x in build_artifacts))))
f.write(textwrap.dedent('''\
rule caresmake_{mode}
command = make -C build/{mode}/{cares_dir} CC={args.cc}
rule carescmake_{mode}
command = mkdir -p $builddir/{mode}/{cares_dir} && cd $builddir/{mode}/{cares_dir} && CC={args.cc} cmake {cares_opts} {srcdir}/$in
build $builddir/{mode}/{cares_dir}/Makefile : carescmake_{mode} {cares_dir}
build $builddir/{mode}/{cares_dir}/ares_build.h : phony $builddir/{mode}/{cares_dir}/Makefile
build $builddir/{mode}/{cares_src_lib} : caresmake_{mode} $builddir/{mode}/{cares_dir}/Makefile | {cares_sources}
build $builddir/{mode}/lib{cares_lib}.a : copy_file $builddir/{mode}/{cares_src_lib}
''').format(cares_opts=(modeval['cares_opts']), **globals()))
objdeps['$builddir/' + mode + '/net/dns.o'] = ' $builddir/' + mode + '/' + cares_dir + '/ares_build.h'
compiles = {}
ragels = {}
swaggers = {}
protobufs = {}
for binary in build_artifacts:
srcs = deps[binary]
objs = ['$builddir/' + mode + '/' + src.replace('.cc', '.o')
for src in srcs
if src.endswith('.cc')]
objs += ['$builddir/' + mode + '/gen/' + src.replace('.proto', '.pb.o')
for src in srcs
if src.endswith('.proto')]
if binary.endswith('.pc'):
vars = modeval.copy()
vars.update(globals())
pc = textwrap.dedent('''\
Name: Seastar
URL: http://seastar-project.org/
Description: Advanced C++ framework for high-performance server applications on modern hardware.
Version: 1.0
Libs: -L$full_builddir/{mode} -Wl,--whole-archive,-lseastar,--no-whole-archive $cxxflags $cxxflags_{mode} -Wl,--no-as-needed {static} {pie} {user_ldflags} {sanitize_libs} {libs}
Cflags: $cxxflags $cxxflags_{mode}
''').format(**vars)
f.write('build $builddir/{}/{}: gen\n text = {}\n'.format(mode, binary, repr(pc)))
elif binary.endswith('.a'):
f.write('build $builddir/{}/{}: ar.{} {}\n'.format(mode, binary, mode, str.join(' ', objs)))
else:
libdeps = str.join(' ', ('$builddir/{}/{}'.format(mode, i) for i in built_libs))
extralibs = []
if binary.startswith('tests/'):
if binary in boost_tests:
extralibs += [maybe_static(args.staticboost, '-lboost_unit_test_framework')]
# Our code's debugging information is huge, and multiplied
# by many tests yields ridiculous amounts of disk space.
# So we strip the tests by default; the user can very
# quickly re-link the test unstripped by adding a "_g"
# to the test name, e.g., "ninja build/release/testname_g"
f.write('build $builddir/{}/{}: {}.{} {} | {} {}\n'.format(mode, binary, tests_link_rule, mode, str.join(' ', objs), dpdk_deps, libdeps))
f.write(' extralibs = {}\n'.format(' '.join(extralibs)))
f.write('build $builddir/{}/{}_g: link.{} {} | {} {}\n'.format(mode, binary, mode, str.join(' ', objs), dpdk_deps, libdeps))
f.write(' extralibs = {}\n'.format(' '.join(extralibs)))
else:
f.write('build $builddir/{}/{}: link.{} {} | {} {} $builddir/{}/lib{}.a\n'.format(mode, binary, mode, str.join(' ', objs), dpdk_deps, libdeps, mode, cares_lib))
for src in srcs:
if src.endswith('.cc'):
obj = '$builddir/' + mode + '/' + src.replace('.cc', '.o')
compiles[obj] = src
elif src.endswith('.proto'):
hh = '$builddir/' + mode + '/gen/' + src.replace('.proto', '.pb.h')
protobufs[hh] = src
compiles[hh.replace('.h', '.o')] = hh.replace('.h', '.cc')
elif src.endswith('.rl'):
hh = '$builddir/' + mode + '/gen/' + src.replace('.rl', '.hh')
ragels[hh] = src
elif src.endswith('.json'):
hh = '$builddir/' + mode + '/gen/' + src + '.hh'
swaggers[hh] = src
else:
raise Exception('No rule for ' + src)
for obj in compiles:
src = compiles[obj]
gen_headers = list(ragels.keys()) + list(swaggers.keys()) + list(protobufs.keys())
f.write('build {}: cxx.{} {} || {} \n'.format(obj, mode, src, ' '.join(gen_headers) + dpdk_deps + objdeps.get(obj, '')))
for hh in ragels:
src = ragels[hh]
f.write('build {}: ragel {}\n'.format(hh, src))
for hh in swaggers:
src = swaggers[hh]
f.write('build {}: swagger {} | json/json2code.py\n'.format(hh,src))
for pb in protobufs:
src = protobufs[pb]
c_pb = pb.replace('.h','.cc')
outd = os.path.dirname(os.path.dirname(pb))
f.write('build {} {}: protobuf {}\n outdir = {}\n'.format(c_pb, pb, src, outd))
f.write(textwrap.dedent('''\
rule configure
command = python3 configure.py $configure_args
generator = 1
build build.ninja: configure | configure.py
rule cscope
command = find -name '*.[chS]' -o -name "*.cc" -o -name "*.hh" | cscope -bq -i-
description = CSCOPE
build cscope: cscope
rule md2html
command = pandoc --self-contained --toc -c doc/template.css -V documentclass=report --chapters --number-sections -f markdown_github+pandoc_title_block --highlight-style tango $in -o $out
description = PANDOC $out
rule md2pdf
command = pandoc -f markdown_github+pandoc_title_block --highlight-style tango --template=doc/template.tex $in -o $out
description = PANDOC $out
build doc/tutorial.html: md2html doc/tutorial.md
build doc/tutorial.pdf: md2pdf doc/tutorial.md
default {modes_list}
''').format(modes_list = ' '.join(build_modes), **globals())) | |
reverse.py | import unittest
def reverse(text: str) -> str:
"""Reverse a string (parameter renamed from str to avoid shadowing the built-in)."""
if len(text) < 2:
return text
else:
result = ''
for character in reversed(text):
result += character
return result
class TestReverse(unittest.TestCase): | def setUp(self):
pass
def test_string(self):
self.assertEqual(reverse('Hi my name is Lukas!'), '!sakuL si eman ym iH')
def test_empty_string(self):
self.assertEqual(reverse(''), '')
def test_crazy_string(self):
self.assertEqual(reverse('!.!!'), '!!.!')
def test_single_string(self):
self.assertEqual(reverse('a'), 'a')
if __name__ == '__main__':
print(reverse('Hi my name is Lukas!'))
# unittest.main() | |
companyEnvURL.js | var comEnvURL, aldta = {}, envData = {};
$(document).ready(function () {
displayAllEnvironmentUrls();
$.ajax({
url: base_url + "/application/allByCompany",
method: "get",
beforeSend: function (xhr) {
xhr.setRequestHeader('Authorization', "Bearer " + readCookie("TAaccess"));
},
success: function (data) {
var options = "";
$.each(data, function (key, value) {
options += '<option value="' + value.applicationId + '">' + value.applicationName + '</option>';
});
$("select[name=application_id]").append(options);
}
});
$.ajax({
url: base_url + "/environment/all",
method: "get",
beforeSend: function (xhr) {
xhr.setRequestHeader('Authorization', "Bearer " + readCookie("TAaccess"));
},
success: function (data) {
envData = data;
var payload = "";
$.each(data, function (key, value) {
payload += '<div class="form-group">';
payload += '<label for="exampleInputEmail1">' + value.environmentName + '</label>';
payload += '<input class="form-control" data-envurl-id="" data-env-id="' + value.environmentId + '" name="' + value.environmentName + '" placeholder="' + value.environmentName + '" type="text">';
payload += '</div>';
});
$('#modal_ajax .panel-body .modal-body .col-md-12').append(payload);
$("select[name=application_id]").on("change", function () {
$('#modal_ajax input').val('');
$('#modal_ajax textarea').val('');
if (this.value != "") {
$.ajax({
url: base_url + "/companyEnvironUrl/getAllByCompanyId/" + $("select[name=application_id]").val(),
method: "get",
beforeSend: function (xhr) {
xhr.setRequestHeader('Authorization', "Bearer " + readCookie("TAaccess"));
},
success: function (data) {
aldta = data;
var payload = "";
$.each(data.companyEnvironUrls, function (key, value) {
$('#modal_ajax .panel-body .modal-body .col-md-12 input[name=' + value.environment.environmentName + ']').val(value.envUrl);
$('#modal_ajax .panel-body .modal-body .col-md-12 input[name=' + value.environment.environmentName + ']').attr("data-envurl-id", value.companyEnvironUrlId);
});
}
});
}
});
}
});
// serialize form fields into a plain object (repeated field names become arrays)
$.fn.serializeObject = function () {
var o = {};
var a = this.serializeArray();
$.each(a, function () {
if (o[this.name]) {
if (!o[this.name].push) {
o[this.name] = [o[this.name]];
}
o[this.name].push(this.value || '');
} else {
o[this.name] = this.value || '';
}
});
return o;
};
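// Usage sketch (illustrative only): serializes a form's fields into a plain object;
// repeated field names are collected into arrays. The selector below is hypothetical.
// var payload = $("#environmentUrlForm").serializeObject();
// // e.g. { application_id: "3", url: "http://example.test" }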
$.delete = function (url, data, callback, type) {
if ($.isFunction(data)) {
type = type || callback,
callback = data,
data = {}
}
return $.ajax({
url: url,
type: 'DELETE',
beforeSend: function (xhr) {
xhr.setRequestHeader('Authorization', "Bearer " + readCookie("TAaccess"));
},
success: callback,
data: data,
contentType: type
});
}
var tableFixed = $('#table-example-fixed').dataTable({
'info': false,
'pageLength': 50
});
//new $.fn.dataTable.FixedHeader(tableFixed);
});
function addCompanyEnvironUrlId(companyEnvironUrlId = 0) {
if ($("select[name=application_id]:visible").val() == "") {
showError();
return false;
}
/*var data = {};
if(aldta.companyEnvironUrls.length > 0) {
$.each(aldta.companyEnvironUrls, function(k,v){
v.envUrl = $('input[name='+v.environment.environmentName+']:visible').val();
v.environment.environmentId = $('input[name='+v.environment.environmentName+']:visible').attr("data-env-id");
});
}
else {*/
//data = envData;
//aldta.companyEnvironUrls = [];
$.each(envData, function (k, v) {
if (aldta.companyEnvironUrls[k] == undefined) {
aldta.companyEnvironUrls[k] = {};
}
aldta.companyEnvironUrls[k].envUrl = $('input[name=' + v.environmentName + ']:visible').val();
aldta.companyEnvironUrls[k].environment = {};
aldta.companyEnvironUrls[k].environment.environmentId = $('input[name=' + v.environmentName + ']:visible').attr("data-env-id");
});
//}
$.ajax({
type: 'POST',
data: JSON.stringify(aldta),
contentType: 'application/json',
dataType: 'json',
url: base_url + "/companyEnvironUrl/saveAll",
beforeSend: function (xhr) {
xhr.setRequestHeader('Authorization', "Bearer " + readCookie("TAaccess"));
},
success: function (msg) {
$('.modal').modal('hide');
displayAllEnvironmentUrls();
// window.location.href = window.location.href;
// if (!alert(successMsg)) {
// window.location.href = window.location.href;
// }
}
});
return false;
}
/*function addCompanyEnvironUrlId(companyEnvironUrlId=0){
$applicationId = $('select[name=application_id]:visible').val();
$environmentName = $('input[name=environment]:visible').val();
if($environmentName == '')
{
$('#res').html("<span style='color:red;text-transform:capitalize;font-size:14px'>Enter Environment Name..!</span>");
return false;
}
var dataObj = {};
dataObj["environmentName"]= $environmentName;
dataObj["applicationId"]= $applicationId;
if(companyEnvironUrlId!==0){
dataObj["companyEnvironUrlId"] = companyEnvironUrlId;
}
$.ajax({
type: 'POST',
data: JSON.stringify(dataObj),
contentType: 'application/json',
dataType: 'json',
url: base_url+"/companyEnvironUrl/save",
beforeSend: function (xhr) {
xhr.setRequestHeader('Authorization', "Bearer " + readCookie("TAaccess"));
},
success: function(msg){
$('.modal').modal('hide');
window.location.href= window.location.href;
}
});
return false;
}
*/
function showViewModal(id) {
$.ajax({
url: base_url + "/companyEnvironUrl/getAllByCompanyId/" + id,
method: "get",
beforeSend: function (xhr) {
xhr.setRequestHeader('Authorization', "Bearer " + readCookie("TAaccess"));
},
success: function (data) {
var payload = "";
$.each(data.companyEnvironUrls, function (index, value) {
payload += '<tr>';
payload += '<td>' + (index + 1) + '</td>';
payload += '<td>' + value.environment.environmentName + '</td>';
payload += '<td>' + value.envUrl + '</td>';
payload += '</tr>';
});
$(".panel-title h5 b").html("Application Name : " + comEnvURL[id].applicationName);
$('#view_modal table#viewTabale tbody').html(payload);
$('#view_modal').modal('show', { backdrop: 'true' });
}
});
}
function showUpdateModal(url) {
$('#modal_ajax').modal('show', { backdrop: 'true' });
$("select[name=application_id]").val(url).change();
}
function showAddModal(url) {
jQuery('#modal_ajax input').val('');
// LOADING THE AJAX MODAL
jQuery('#modal_ajax').modal('show', {
backdrop: 'true'
});
}
function showAjaxModal() {
// LOADING THE AJAX MODAL
jQuery('#modal_ajax').modal('show', {
backdrop: 'true'
});
}
function showTestImage(url) {
// SHOWING AJAX PRELOADER IMAGE
jQuery('#image_ajax .modal-body').html('<div style="text-align:center;margin-top:200px;"><img src="Libraries/img/loader.GIF" style="height:50px;" /></div>');
// LOADING THE AJAX MODAL
jQuery('#image_ajax').modal('show', {
backdrop: 'true'
});
// SHOW AJAX RESPONSE ON REQUEST SUCCESS
$.ajax({
url: url,
beforeSend: function (xhr) {
xhr.setRequestHeader('Authorization', "Bearer " + readCookie("TAaccess"));
},
success: function (response) {
jQuery('#image_ajax .modal-body').html(response);
}
});
}
function confirm_modal(delete_url, post_refresh_url) {
$('#preloader-delete').html('');
jQuery('#modal_delete').modal('show', {
backdrop: 'static'
});
document.getElementById('delete_link').setAttribute("onClick", "delete_data('" + delete_url + "' , '" + post_refresh_url + "')");
document.getElementById('delete_link').focus();
}
function checkDelete(applicationId) {
var chk = confirm("Are You Sure To Delete This !");
if (chk) {
$.ajax({
url: base_url + "/companyEnvironUrl/byApplication/" + applicationId,
type: 'DELETE',
beforeSend: function (xhr) {
xhr.setRequestHeader('Authorization', "Bearer " + readCookie("TAaccess"));
},
success: function (response) {
window.location.href = window.location.href;
}
});
return true;
} else {
return false;
}
}
function saveEnvironment(dataObj) {
$.ajax({
type: 'POST',
data: JSON.stringify(dataObj),
contentType: 'application/json',
dataType: 'json',
url: base_url + "/environment/save",
beforeSend: function (xhr) {
xhr.setRequestHeader('Authorization', "Bearer " + readCookie("TAaccess"));
},
success: function () {
$("a.addRowBtn").css("pointer-events", "");
$("a.addRowBtn").css("opacity", "");
$("#deleteRow").attr("disabled", false);
$("#deleteRow1").attr("disabled", false);
$("#deleteRow2").attr("disabled", false);
$("#execution_environment").val('');
$("button.addEnvBtn").closest(".addRowData").slideUp();
$("#executionEnvironmentAddition").modal();
fetchAllEnvironment();
}
});
}
function deleteSelectedEnvironment(environmentName) {
$.ajax({
type: 'DELETE',
contentType: 'application/json',
dataType: 'json',
async: false,
url: base_url + "/environment/" + readCookie("TAuid") + "/" + environmentName,
beforeSend: function (xhr) {
xhr.setRequestHeader('Authorization', "Bearer " + readCookie("TAaccess"));
},
success: function () {
$(this).closest("tr").remove();
}
});
}
function fetchAllApplications() {
$.ajax({
type: 'GET',
contentType: 'application/json',
dataType: 'json',
url: base_url + "/application/allByCompany",
beforeSend: function (xhr) {
xhr.setRequestHeader('Authorization', "Bearer " + readCookie("TAaccess"));
},
success: function (data) {
var options = '<option value="0">Select Application</option>';
data.map((item) => {
options = options + '<option value="' + item.applicationId + '">' + item.applicationName + '</option>';
})
$("#application").html(options);
}
});
}
function fetchAllEnvironments() {
$.ajax({
type: 'GET',
contentType: 'application/json',
dataType: 'json',
url: base_url + "/environment/all",
beforeSend: function (xhr) {
xhr.setRequestHeader('Authorization', "Bearer " + readCookie("TAaccess"));
},
success: function (data) {
var options = '<option value="0">Select Environment</option>';
data.map((item) => {
options = options + '<option value="' + item.environmentId + '">' + item.environmentName + '</option>';
})
$("#url_environment").html(options);
}
});
}
function saveEnvironmentUrl(dataObj) {
$.ajax({
type: 'POST',
data: JSON.stringify(dataObj),
contentType: 'application/json',
dataType: 'json',
url: base_url + "/companyEnvironUrl/save",
beforeSend: function (xhr) {
xhr.setRequestHeader('Authorization', "Bearer " + readCookie("TAaccess"));
},
success: function (data) {
$("#savedEnvironmentUrl").modal();
closeEnvironmentUrlInput();
displayAllEnvironmentUrls();
// window.location.href= window.location.href;
}
});
}
function closeEnvironmentUrlInput() {
$("#execution_environment").val("");
$("#url_environment").val("0");
$("#application").val("0");
$("#url").val("");
$("#username").val("");
$("#password").val("");
$("#confirm_password").val("");
$("#role").val("");
$("a.addRowBtn").css("pointer-events", "");
$("a.addRowBtn").css("opacity", "");
$("#deleteRow").attr("disabled", false);
$("#deleteRow1").attr("disabled", false);
$("#deleteRow2").attr("disabled", false);
$("button.cancelRow").closest(".addRowData").slideUp();
}
function displayAllEnvironmentUrls() {
$.ajax({
url: base_url + "/companyEnvironUrl/findAllByCompanyId",
method: "get",
beforeSend: function (xhr) {
xhr.setRequestHeader('Authorization', "Bearer " + readCookie("TAaccess"));
},
success: function (data) {
var payload = "";
var appOptions = "";
comEnvURL = data.map;
$.each(data.map, function (key, value) {
$.each(value.environmentList, function (k, v) {
payload += '<tr>';
payload += '<td scope="col" class="bucketcheck">';
payload += '<label class="main subCB">';
payload += '<input type="checkbox" data-value=' + v.companyEnvironUrlId + '>';
payload += '<span class="geekmark"></span>';
payload += '</label>';
payload += '</td>';
payload += '<td>' + value.applicationName + '</td>';
payload += '<td>' + v.environment.environmentName + '</td>';
payload += '<td>' + v.envUrl + '</td>';
payload += '</tr>';
});
});
if(payload != ""){
$(".UrltableParent .paging_full_numbers").remove()
$('.Urltable').dataTable().fnClearTable();
$('.Urltable').dataTable().fnDestroy();
$(".Urltable tbody").html(payload);
$('.Urltable').DataTable({
"lengthChange": false,
"searching": false, // Search Box will Be Disabled
"ordering": true, // Ordering (Sorting on Each Column)will Be Disabled
"info": false,
"pagingType": "full_numbers"
});
$(".selectdiv").css("padding-left","4rem")
$(".bucketList_wrapper").css("padding-left","4rem")
$(".Urltable").css("margin-left","2rem")
}else{
$('.Urltable').dataTable().fnClearTable();
$('.Urltable').dataTable().fnDestroy();
}
}
});
}
function deleteSelectedEnvironmentUrl(environmentURLId) {
$.ajax({
type: 'DELETE',
contentType: 'application/json',
dataType: 'json',
url: base_url + "/companyEnvironUrl/" + environmentURLId,
beforeSend: function (xhr) {
xhr.setRequestHeader('Authorization', "Bearer " + readCookie("TAaccess"));
},
success: function () {
$(this).closest("tr").remove();
}
});
}
function displayAllAccessRoles() {
$.ajax({
url: base_url + "/accessRole/allByCompany",
method: "GET",
beforeSend: function (xhr) {
xhr.setRequestHeader('Authorization', "Bearer " + readCookie("TAaccess"));
},
success: function (data) {
var payload = "";
data.map((value) => {
console.log(value);
var Val = value.name;
payload += '<tr>';
payload += '<td scope="col" class="bucketcheck">';
payload += '<label class="main subCB">';
payload += '<input type="checkbox" data-value=' + value.executionUserId + '>';
payload += '<span class="geekmark"></span>';
payload += '</label>';
payload += '</td>';
payload += '<td>' + Val + '</td>';
payload += '<td>' + value.role + '</td>';
payload += '</tr>';
});
if(payload != ""){
$(".RoletableParent .paging_full_numbers").remove()
$('.Roletable').dataTable().fnClearTable();
$('.Roletable').dataTable().fnDestroy();
$(".Roletable tbody").html(payload);
$('.Roletable').DataTable({
"lengthChange": false,
"searching": false, // Search Box will Be Disabled
"ordering": true, // Ordering (Sorting on Each Column)will Be Disabled
"info": false,
"pagingType": "full_numbers"
});
$(".selectdiv").css("padding-left","4rem")
$(".bucketList_wrapper").css("padding-left","4rem")
$(".Roletable").css("margin-left","2rem")
}else{
$('.Roletable').dataTable().fnClearTable();
$('.Roletable').dataTable().fnDestroy();
}
}
});
}
function saveAccessRoles(dataObj) {
$.ajax({
type: 'POST',
data: JSON.stringify(dataObj),
contentType: 'application/json',
dataType: 'json',
url: base_url + "/accessRole/save",
beforeSend: function (xhr) {
xhr.setRequestHeader('Authorization', "Bearer " + readCookie("TAaccess"));
},
success: function (data) {
$("#addRoleModal").modal();
closeAccessRoleInput();
displayAllAccessRoles();
}
});
}
function closeAccessRoleInput() {
$("a.addRowBtn").css("pointer-events", "");
$("a.addRowBtn").css("opacity", "");
$("#deleteRow").attr("disabled", false);
$("#deleteRow1").attr("disabled", false);
$("#deleteRow2").attr("disabled", false);
$("button.addroleBtn").closest(".addRowData").slideUp();
$("#username").val("");
$("#password").val("");
$("#confirm_password").val("");
$("#role").val("");
}
function | (id) {
$.ajax({
type: 'DELETE',
contentType: 'application/json',
dataType: 'json',
url: base_url + "/accessRole/" + id,
beforeSend: function (xhr) {
xhr.setRequestHeader('Authorization', "Bearer " + readCookie("TAaccess"));
},
success: function () {
displayAllAccessRoles();
}
});
}
function showLoader(){
$("#loader").addClass("loading");
}
function hideLoader(){
$("#loader").removeClass("loading");
}
| deleteSelectedAccessRole |
now_playing_model.rs | use gio::prelude::*;
use gio::{ActionMapExt, SimpleActionGroup};
use std::cell::Ref;
use std::ops::Deref;
use std::rc::Rc;
use crate::app::components::{labels, PlaylistModel};
use crate::app::models::SongModel;
use crate::app::state::{
PlaybackAction, PlaybackEvent, PlaybackState, SelectionAction, SelectionState,
};
use crate::app::{ActionDispatcher, AppEvent, AppModel, AppState};
pub struct NowPlayingModel {
app_model: Rc<AppModel>,
dispatcher: Box<dyn ActionDispatcher>,
}
impl NowPlayingModel {
pub fn new(app_model: Rc<AppModel>, dispatcher: Box<dyn ActionDispatcher>) -> Self {
Self {
app_model,
dispatcher,
}
}
fn state(&self) -> Ref<'_, AppState> {
self.app_model.get_state()
}
fn queue(&self) -> Ref<'_, PlaybackState> {
Ref::map(self.state(), |s| &s.playback)
}
pub fn toggle_shuffle(&self) {
self.dispatcher
.dispatch(PlaybackAction::ToggleShuffle.into());
}
pub fn clear_queue(&self) {
self.dispatcher.dispatch(PlaybackAction::ClearQueue.into());
}
}
impl PlaylistModel for NowPlayingModel {
fn current_song_id(&self) -> Option<String> {
self.queue().current_song_id.clone()
}
fn songs(&self) -> Vec<SongModel> {
self.queue()
.songs()
.enumerate()
.map(|(i, s)| s.to_song_model(i))
.collect()
}
fn play_song(&self, id: &str) {
self.dispatcher
.dispatch(PlaybackAction::Load(id.to_string()).into());
}
fn should_refresh_songs(&self, event: &AppEvent) -> bool {
matches!(
event,
AppEvent::PlaybackEvent(PlaybackEvent::PlaylistChanged)
)
}
fn actions_for(&self, id: &str) -> Option<gio::ActionGroup> {
let queue = self.queue();
let song = queue.song(id)?;
let group = SimpleActionGroup::new();
for view_artist in song.make_artist_actions(self.dispatcher.box_clone(), None) {
group.add_action(&view_artist);
}
group.add_action(&song.make_album_action(self.dispatcher.box_clone(), None));
group.add_action(&song.make_link_action(None));
group.add_action(&song.make_dequeue_action(self.dispatcher.box_clone(), None));
Some(group.upcast())
}
fn menu_for(&self, id: &str) -> Option<gio::MenuModel> {
let queue = self.queue();
let song = queue.song(id)?;
let menu = gio::Menu::new();
menu.append(Some(&*labels::VIEW_ALBUM), Some("song.view_album"));
for artist in song.artists.iter() {
menu.append(
Some(&format!("{} {}", *labels::MORE_FROM, artist.name)),
Some(&format!("song.view_artist_{}", artist.id)),
);
}
menu.append(Some(&*labels::COPY_LINK), Some("song.copy_link"));
menu.append(Some(&*labels::REMOVE_FROM_QUEUE), Some("song.dequeue"));
Some(menu.upcast())
}
fn select_song(&self, id: &str) {
let queue = self.queue();
if let Some(song) = queue.song(id) {
self.dispatcher
.dispatch(SelectionAction::Select(song.clone()).into());
}
}
fn deselect_song(&self, id: &str) |
fn selection(&self) -> Option<Box<dyn Deref<Target = SelectionState> + '_>> {
Some(Box::new(self.app_model.map_state(|s| &s.selection)))
}
}
| {
self.dispatcher
.dispatch(SelectionAction::Deselect(id.to_string()).into());
} |
views.py | from django.contrib.auth import get_user_model
from django.contrib.auth.decorators import permission_required
from django.http import JsonResponse
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from django.template.loader import render_to_string
from django.utils.decorators import method_decorator
from django.views.generic import View
from django.views.generic.edit import FormMixin
from account.mixins import LoginRequiredMixin
from .forms import InviteForm
from .models import InvitationStat, JoinInvitation
class InviteView(LoginRequiredMixin, FormMixin, View):
form_class = InviteForm
invite_form_fragment = "pinax/invitations/_invite_form.html"
invites_remaining_fragment = "pinax/invitations/_invites_remaining.html"
invited_fragment = "pinax/invitations/_invited.html"
invites_remaining_fragment_selector = ".pinax-invitations-invites-remaining"
invited_fragment_selector = ".pinax-invitations-invites-sent"
def get_data(self, form):
data = {
"html": render_to_string(
self.invite_form_fragment, {
"form": form,
"user": self.request.user
}, request=self.request
),
"fragments": {
self.invites_remaining_fragment_selector: render_to_string(
self.invites_remaining_fragment, {
"invites_remaining": self.request.user.invitationstat.invites_remaining()
}, request=self.request
),
self.invited_fragment_selector: render_to_string(
self.invited_fragment, {
"invited_list": self.request.user.invites_sent.all()
}, request=self.request
)
}
}
return data
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs.update({
"user": self.request.user
})
return kwargs
def form_valid(self, form):
email = form.cleaned_data["email_address"]
JoinInvitation.invite(self.request.user, email)
return JsonResponse(self.get_data(InviteForm(user=self.request.user)))
def form_invalid(self, form):
return JsonResponse(self.get_data(form))
def post(self, request, *args, **kwargs):
form = self.get_form()
if form.is_valid():
return self.form_valid(form)
else:
return self.form_invalid(form)
class ManageInvitesView(LoginRequiredMixin, View):
@method_decorator(permission_required("pinax-invitations.manage_invites", raise_exception=True))
def dispatch(self, *args, **kwargs):
return super().dispatch(*args, **kwargs)
class InviteStatView(ManageInvitesView):
def get(self, request, *args, **kwargs):
user = get_object_or_404(get_user_model(), pk=kwargs.get("pk"))
return JsonResponse({
"html": render_to_string(
self.invite_stat_fragment, {
"stat": user.invitationstat
}, context_instance=RequestContext(request)
)
})
class ManageInviteAmountsView(ManageInvitesView):
amount_post_var = "amount"
def get_amount(self):
return int(self.request.POST.get(self.amount_post_var))
class AllManageInviteAmountsView(ManageInviteAmountsView):
def action(self, amount):
return
def post(self, request, *args, **kwargs):
amount = self.get_amount()
self.action(amount)
return JsonResponse({
"inner-fragments": {self.inner_fragments_amount_selector: amount}
})
class UserManageInviteAmountsView(ManageInviteAmountsView):
def action(self, user, amount):
return
def post(self, request, *args, **kwargs):
user = get_object_or_404(get_user_model(), pk=kwargs.get("pk"))
amount = self.get_amount()
self.action(user, amount)
return JsonResponse({
"html": amount
})
class TopOffAllView(AllManageInviteAmountsView):
inner_fragments_amount_selector = ".invite-total"
def action(self, amount):
|
class TopOffUserView(UserManageInviteAmountsView):
def action(self, user, amount):
InvitationStat.topoff_user(user=user, amount=amount)
class AddToAllView(AllManageInviteAmountsView):
inner_fragments_amount_selector = ".amount-added"
def action(self, amount):
InvitationStat.add_invites(amount)
class AddToUserView(UserManageInviteAmountsView):
def action(self, user, amount):
InvitationStat.add_invites_to_user(user=user, amount=amount)
| InvitationStat.topoff(amount) |
ComponentEventsObserver.ts | import * as _ from 'lodash';
import { EventSubscription } from '../interfaces/EventSubscription';
import {
ComponentDidAppearEvent,
ComponentDidDisappearEvent,
NavigationButtonPressedEvent,
SearchBarUpdatedEvent,
SearchBarCancelPressedEvent,
ComponentEvent,
PreviewCompletedEvent,
ModalDismissedEvent,
SideMenuDidAppearEvent,
SideMenuDidDisappearEvent
} from '../interfaces/ComponentEvents';
import { NativeEventsReceiver } from '../adapters/NativeEventsReceiver';
export class | {
private readonly listeners = {};
private alreadyRegistered = false;
constructor(private readonly nativeEventsReceiver: NativeEventsReceiver) {
this.notifyComponentDidAppear = this.notifyComponentDidAppear.bind(this);
this.notifyComponentDidDisappear = this.notifyComponentDidDisappear.bind(this);
this.notifyNavigationButtonPressed = this.notifyNavigationButtonPressed.bind(this);
this.notifyModalDismissed = this.notifyModalDismissed.bind(this);
this.notifySearchBarUpdated = this.notifySearchBarUpdated.bind(this);
this.notifySearchBarCancelPressed = this.notifySearchBarCancelPressed.bind(this);
this.notifyPreviewCompleted = this.notifyPreviewCompleted.bind(this);
this.notifySideMenuDidAppear = this.notifySideMenuDidAppear.bind(this);
this.notifySideMenuDidDisappear = this.notifySideMenuDidDisappear.bind(this);
}
public registerOnceForAllComponentEvents() {
if (this.alreadyRegistered) { return; }
this.alreadyRegistered = true;
this.nativeEventsReceiver.registerComponentDidAppearListener(this.notifyComponentDidAppear);
this.nativeEventsReceiver.registerComponentDidDisappearListener(this.notifyComponentDidDisappear);
this.nativeEventsReceiver.registerNavigationButtonPressedListener(this.notifyNavigationButtonPressed);
this.nativeEventsReceiver.registerModalDismissedListener(this.notifyModalDismissed);
this.nativeEventsReceiver.registerSearchBarUpdatedListener(this.notifySearchBarUpdated);
this.nativeEventsReceiver.registerSearchBarCancelPressedListener(this.notifySearchBarCancelPressed);
this.nativeEventsReceiver.registerPreviewCompletedListener(this.notifyPreviewCompleted);
this.nativeEventsReceiver.registerSideMenuDidAppearListener(this.notifySideMenuDidAppear);
this.nativeEventsReceiver.registerSideMenuDidDisappearListener(this.notifySideMenuDidDisappear);
}
public bindComponent(component: React.Component<any>, componentId?: string): EventSubscription {
const computedComponentId = componentId || component.props.componentId;
if (!_.isString(computedComponentId)) {
throw new Error(`bindComponent expects a component with a componentId in props or a componentId as the second argument`);
}
if (_.isNil(this.listeners[computedComponentId])) {
this.listeners[computedComponentId] = {};
}
const key = _.uniqueId();
this.listeners[computedComponentId][key] = component;
return { remove: () => _.unset(this.listeners[computedComponentId], key) };
}
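// Usage sketch (illustrative only): a screen component binds itself to receive the
// events dispatched below and removes the subscription when it unmounts. The
// `componentEventsObserver` instance name here is an assumption for the example.
//
//   const subscription = componentEventsObserver.bindComponent(this); // componentId read from props
//   // ...later, e.g. in componentWillUnmount:
//   subscription.remove();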
public unmounted(componentId: string) {
_.unset(this.listeners, componentId);
}
notifyComponentDidAppear(event: ComponentDidAppearEvent) {
this.triggerOnAllListenersByComponentId(event, 'componentDidAppear');
}
notifyComponentDidDisappear(event: ComponentDidDisappearEvent) {
this.triggerOnAllListenersByComponentId(event, 'componentDidDisappear');
}
notifyNavigationButtonPressed(event: NavigationButtonPressedEvent) {
this.triggerOnAllListenersByComponentId(event, 'navigationButtonPressed');
}
notifyModalDismissed(event: ModalDismissedEvent) {
this.triggerOnAllListenersByComponentId(event, 'modalDismissed');
}
notifySearchBarUpdated(event: SearchBarUpdatedEvent) {
this.triggerOnAllListenersByComponentId(event, 'searchBarUpdated');
}
notifySearchBarCancelPressed(event: SearchBarCancelPressedEvent) {
this.triggerOnAllListenersByComponentId(event, 'searchBarCancelPressed');
}
notifyPreviewCompleted(event: PreviewCompletedEvent) {
this.triggerOnAllListenersByComponentId(event, 'previewCompleted');
}
notifySideMenuDidAppear(event: SideMenuDidAppearEvent) {
this.triggerOnAllListenersByComponentId(event, 'sideMenuDidAppear');
}
notifySideMenuDidDisappear(event: SideMenuDidDisappearEvent) {
this.triggerOnAllListenersByComponentId(event, 'sideMenuDidDisappear');
}
private triggerOnAllListenersByComponentId(event: ComponentEvent, method: string) {
_.forEach(this.listeners[event.componentId], (component) => {
if (_.isObject(component) && _.isFunction(component[method])) {
component[method](event);
}
});
}
}
| ComponentEventsObserver |
Step.js | import * as React from 'react';
import PropTypes from 'prop-types';
import clsx from 'clsx';
import { integerPropType } from '@material-ui/utils';
import { unstable_composeClasses as composeClasses } from '@material-ui/unstyled';
import StepperContext from '../Stepper/StepperContext';
import StepContext from './StepContext';
import useThemeProps from '../styles/useThemeProps';
import experimentalStyled from '../styles/experimentalStyled';
import { getStepUtilityClass } from './stepClasses';
const useUtilityClasses = (styleProps) => {
const { classes, orientation, alternativeLabel, completed } = styleProps;
const slots = {
root: ['root', orientation, alternativeLabel && 'alternativeLabel', completed && 'completed'],
};
return composeClasses(slots, getStepUtilityClass, classes);
};
const StepRoot = experimentalStyled('div', {
name: 'MuiStep',
slot: 'Root',
overridesResolver: (props, styles) => {
const { styleProps } = props;
return {
...styles.root,
...styles[styleProps.orientation],
...(styleProps.alternativeLabel && styles.alternativeLabel),
...(styleProps.completed && styles.completed),
};
},
})(({ styleProps }) => ({
/* Styles applied to the root element if `orientation="horizontal"`. */
...(styleProps.orientation === 'horizontal' && {
paddingLeft: 8,
paddingRight: 8,
}),
/* Styles applied to the root element if `alternativeLabel={true}`. */
...(styleProps.alternativeLabel && {
flex: 1,
position: 'relative',
}),
}));
const Step = React.forwardRef(function Step(inProps, ref) {
const props = useThemeProps({ props: inProps, name: 'MuiStep' });
const {
active: activeProp,
children,
className,
completed: completedProp,
disabled: disabledProp,
expanded = false,
index,
last,
...other
} = props;
const { activeStep, connector, alternativeLabel, orientation, nonLinear } =
React.useContext(StepperContext);
let [active = false, completed = false, disabled = false] = [
activeProp,
completedProp,
disabledProp,
];
if (activeStep === index) {
active = activeProp !== undefined ? activeProp : true;
} else if (!nonLinear && activeStep > index) {
completed = completedProp !== undefined ? completedProp : true;
} else if (!nonLinear && activeStep < index) {
disabled = disabledProp !== undefined ? disabledProp : true;
}
const contextValue = React.useMemo(
() => ({ index, last, expanded, icon: index + 1, active, completed, disabled }),
[index, last, expanded, active, completed, disabled],
);
const styleProps = {
...props,
active,
orientation,
alternativeLabel,
completed,
disabled,
expanded,
};
const classes = useUtilityClasses(styleProps);
const newChildren = (
<StepRoot
className={clsx(classes.root, className)}
ref={ref}
styleProps={styleProps}
{...other}
>
{connector && alternativeLabel && index !== 0 ? connector : null}
{children}
</StepRoot>
);
return (
<StepContext.Provider value={contextValue}> | {newChildren}
</React.Fragment>
) : (
newChildren
)}
</StepContext.Provider>
);
});
Step.propTypes /* remove-proptypes */ = {
// ----------------------------- Warning --------------------------------
// | These PropTypes are generated from the TypeScript type definitions |
// | To update them edit the d.ts file and run "yarn proptypes" |
// ----------------------------------------------------------------------
/**
* Sets the step as active. Is passed to child components.
*/
active: PropTypes.bool,
/**
* Should be `Step` sub-components such as `StepLabel`, `StepContent`.
*/
children: PropTypes.node,
/**
* Override or extend the styles applied to the component.
*/
classes: PropTypes.object,
/**
* @ignore
*/
className: PropTypes.string,
/**
* Mark the step as completed. Is passed to child components.
*/
completed: PropTypes.bool,
/**
* If `true`, the step is disabled, will also disable the button if
* `StepButton` is a child of `Step`. Is passed to child components.
*/
disabled: PropTypes.bool,
/**
* Expand the step.
* @default false
*/
expanded: PropTypes.bool,
/**
* The position of the step.
* The prop defaults to the value inherited from the parent Stepper component.
*/
index: integerPropType,
/**
* If `true`, the Step is displayed as rendered last.
* The prop defaults to the value inherited from the parent Stepper component.
*/
last: PropTypes.bool,
/**
* The system prop that allows defining system overrides as well as additional CSS styles.
*/
sx: PropTypes.object,
};
export default Step; | {connector && !alternativeLabel && index !== 0 ? (
<React.Fragment>
{connector} |
user_service.go | package main
import (
"encoding/json"
"errors"
"io/ioutil"
"log"
"time"
"github.com/dgrijalva/jwt-go"
)
type User struct {
Name string
Password string
}
func (u User) GenerateToken() (string) {
claims := &jwt.StandardClaims{
IssuedAt: time.Now().Unix(),
ExpiresAt: time.Now().Add(time.Minute * time.Duration(5)).Unix(),
Issuer: "wordcount",
Subject: u.Name,
}
token := jwt.NewWithClaims(jwt.SigningMethodRS256, claims)
tokenString, err := token.SignedString(jwtPrivateKey)
if err != nil {
log.Fatal(err)
}
return tokenString
}
type UserService struct {
users []User
}
// Low-fi user storage and authentication (I wanted to keep this simple).
// In a real world system, we'd probably be using a DB (w/ hashed passwords)
// or some other centralized service.
func (s UserService) AuthenticateCredentials(name string, password string) (user User, err error) {
for _, u := range s.users {
if u.Name == name && u.Password == password {
return u, nil
}
if u.Name == name {
return user, errors.New("invalid password")
}
}
return user, errors.New("invalid username")
}
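// Illustrative sketch only (not part of this service): if users.json stored
// hashed passwords, as the note above suggests for a real-world system, the
// comparison could go through bcrypt instead of plain ==. The PasswordHash
// field and the golang.org/x/crypto/bcrypt import are assumptions for this example.
//
// func (s UserService) authenticateHashed(name, password string) (User, error) {
//     for _, u := range s.users {
//         if u.Name == name {
//             // CompareHashAndPassword returns nil only when the password matches the stored hash.
//             if err := bcrypt.CompareHashAndPassword([]byte(u.PasswordHash), []byte(password)); err != nil {
//                 return User{}, errors.New("invalid password")
//             }
//             return u, nil
//         }
//     }
//     return User{}, errors.New("invalid username")
// }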
func | () UserService {
content, err := ioutil.ReadFile("/home/wordcount/users.json")
if err != nil {
log.Fatal(err)
}
var users []User
json.Unmarshal(content, &users)
return UserService{ users: users }
}
| NewUserService |
Panel.checklist.ts | markupSupport: ChecklistStatus.unknown,
highContrastSupport: ChecklistStatus.pass,
rtlSupport: ChecklistStatus.pass,
testCoverage: ChecklistStatus.fair
}; | import { ChecklistStatus } from '../../common/DocPage.types';
export const PanelStatus = {
keyboardAccessibilitySupport: ChecklistStatus.unknown, |
|
catalogProductAttributeRepositoryV1SavePut.js | /**
* Auto-generated action file for "Magento Enterprise" API.
*
* Generated at: 2019-06-06T13:12:39.897Z
* Mass generator version: 1.1.0
*
* flowground :- Telekom iPaaS / magento-com-connector
* Copyright © 2019, Deutsche Telekom AG
* contact: [email protected]
*
* All files of this connector are licensed under the Apache 2.0 License. For details
* see the file LICENSE on the toplevel directory.
*
*
* Operation: 'catalogProductAttributeRepositoryV1SavePut'
* Endpoint Path: '/V1/products/attributes/{attributeCode}' |
const Swagger = require('swagger-client');
const processWrapper = require('../services/process-wrapper');
const spec = require('../spec.json');
// this wrapers offers a simplified emitData(data) function
module.exports.process = processWrapper(processAction);
// parameter names for this call
const PARAMETERS = [
"attributeCode"
];
// mappings from connector field names to API field names
const FIELD_MAP = {
"attributeCode": "attributeCode",
"apply_to": "apply_to",
"attribute_code": "attribute_code",
"attribute_id": "attribute_id",
"backend_model": "backend_model",
"backend_type": "backend_type",
"custom_attributes": "custom_attributes",
"default_frontend_label": "default_frontend_label",
"default_value": "default_value",
"entity_type_id": "entity_type_id",
"extension_attributes": "extension_attributes",
"frontend_class": "frontend_class",
"frontend_input": "frontend_input",
"frontend_labels": "frontend_labels",
"is_comparable": "is_comparable",
"is_filterable": "is_filterable",
"is_filterable_in_grid": "is_filterable_in_grid",
"is_filterable_in_search": "is_filterable_in_search",
"is_html_allowed_on_front": "is_html_allowed_on_front",
"is_required": "is_required",
"is_searchable": "is_searchable",
"is_unique": "is_unique",
"is_used_for_promo_rules": "is_used_for_promo_rules",
"is_used_in_grid": "is_used_in_grid",
"is_user_defined": "is_user_defined",
"is_visible": "is_visible",
"is_visible_in_advanced_search": "is_visible_in_advanced_search",
"is_visible_in_grid": "is_visible_in_grid",
"is_visible_on_front": "is_visible_on_front",
"is_wysiwyg_enabled": "is_wysiwyg_enabled",
"note": "note",
"options": "options",
"position": "position",
"scope": "scope",
"source_model": "source_model",
"used_for_sort_by": "used_for_sort_by",
"used_in_product_listing": "used_in_product_listing",
"validation_rules": "validation_rules",
"attribute": "attribute",
"requestBody": "requestBody"
};
function processAction(msg, cfg) {
var isVerbose = process.env.debug || cfg.verbose;
if (isVerbose) {
console.log(`---MSG: ${JSON.stringify(msg)}`);
console.log(`---CFG: ${JSON.stringify(cfg)}`);
console.log(`---ENV: ${JSON.stringify(process.env)}`);
}
const contentType = 'application/json';
const body = msg.body;
mapFieldNames(body);
let parameters = {};
for(let param of PARAMETERS) {
parameters[param] = body[param];
}
// credentials for this operation
let securities = {};
let callParams = {
spec: spec,
operationId: 'catalogProductAttributeRepositoryV1SavePut',
pathName: '/V1/products/attributes/{attributeCode}',
method: 'put',
parameters: parameters,
requestContentType: contentType,
requestBody: body.requestBody,
securities: {authorized: securities},
server: spec.servers[cfg.server] || cfg.otherServer,
};
if (isVerbose) {
let out = Object.assign({}, callParams);
out.spec = '[omitted]';
console.log(`--SWAGGER CALL: ${JSON.stringify(out)}`);
}
// Call operation via Swagger client
return Swagger.execute(callParams).then(data => {
// emit a single message with data
this.emitData(data);
// if the response contains an array of entities, you can emit them one by one:
// data.obj.someItems.forEach((item) => {
// this.emitData(item);
// }
});
}
function mapFieldNames(obj) {
if(Array.isArray(obj)) {
obj.forEach(mapFieldNames);
}
else if(typeof obj === 'object' && obj) {
Object.keys(obj).forEach(key => {
mapFieldNames(obj[key]);
let goodKey = FIELD_MAP[key];
if(goodKey && goodKey !== key) {
obj[goodKey] = obj[key];
delete obj[key];
}
});
}
} | * Method: 'put'
*
*/ |
main.rs | mod solute;
pub use solute::Solution;
// Arrange a given string in a zigzag pattern over the given number of rows, reading top to bottom and then left to right.
// For example, with the input string "LEETCODEISHIRING" and 3 rows, the arrangement looks like this
fn test1() {
let t1 =String::from("LEETCODEISHIRING");
let ret=Solution::convert(t1,3);
println!("{:?} ",ret ); //3
assert_eq!(ret,String::from("LCIRETOESIIGEDHN"));
}
fn test2() {
let t1 =String::from("0123456789");
let ret=Solution::convert(t1,4);
println!("{:?} ",ret ); //3
}
fn test3() {
let t1 =String::from("0123456789");
let ret=Solution::convert(t1,2);
println!("{:?} ",ret ); //3
}
fn test4() {
let t1 =String::from("0123456789");
let ret=Solution::convert(t1,1);
println! | } ",ret ); //3
}
fn main() {
test1();
test2();
test3();
test4();
// test5();
// println!("Hello, world!");
}
| ("{:? |
formating.py | from collections.abc import Sequence
import mmcv
import numpy as np
import torch
from mmcv.parallel import DataContainer as DC
from ..builder import PIPELINES
def to_tensor(data):
"""Convert objects of various python types to :obj:`torch.Tensor`.
Supported types are: :class:`numpy.ndarray`, :class:`torch.Tensor`,
:class:`Sequence`, :class:`int` and :class:`float`.
Args:
data (torch.Tensor | numpy.ndarray | Sequence | int | float): Data to
be converted.
"""
if isinstance(data, torch.Tensor):
return data
elif isinstance(data, np.ndarray):
return torch.from_numpy(data)
elif isinstance(data, Sequence) and not mmcv.is_str(data):
return torch.tensor(data)
elif isinstance(data, int):
return torch.LongTensor([data])
elif isinstance(data, float):
return torch.FloatTensor([data])
else:
raise TypeError(f'type {type(data)} cannot be converted to tensor.')
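# Minimal usage sketch (illustrative only; not part of the pipeline code):
#   to_tensor(np.zeros((2, 3)))  # numpy array -> torch.Tensor of shape (2, 3)
#   to_tensor([1, 2, 3])         # sequence    -> tensor([1, 2, 3])
#   to_tensor(1)                 # int         -> LongTensor([1])
#   to_tensor(0.5)               # float       -> FloatTensor([0.5])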
@PIPELINES.register_module()
class ToTensor:
"""Convert some results to :obj:`torch.Tensor` by given keys.
Args:
keys (Sequence[str]): Keys that need to be converted to Tensor.
"""
def __init__(self, keys):
self.keys = keys
def __call__(self, results):
"""Call function to convert data in results to :obj:`torch.Tensor`.
Args:
results (dict): Result dict contains the data to convert.
Returns:
dict: The result dict contains the data converted
to :obj:`torch.Tensor`.
"""
for key in self.keys:
results[key] = to_tensor(results[key])
return results
def __repr__(self):
return self.__class__.__name__ + f'(keys={self.keys})'
@PIPELINES.register_module()
class ImageToTensor:
"""Convert image to :obj:`torch.Tensor` by given keys.
The dimension order of input image is (H, W, C). The pipeline will convert
it to (C, H, W). If only 2 dimension (H, W) is given, the output would be
(1, H, W).
Args:
keys (Sequence[str]): Key of images to be converted to Tensor.
"""
def __init__(self, keys):
self.keys = keys
def __call__(self, results):
"""Call function to convert image in results to :obj:`torch.Tensor` and
transpose the channel order.
Args:
results (dict): Result dict contains the image data to convert.
Returns:
dict: The result dict contains the image converted
to :obj:`torch.Tensor` and transposed to (C, H, W) order.
"""
for key in self.keys:
img = results[key]
if len(img.shape) < 3:
img = np.expand_dims(img, -1)
results[key] = to_tensor(img.transpose(2, 0, 1))
return results
def __repr__(self):
return self.__class__.__name__ + f'(keys={self.keys})'
@PIPELINES.register_module()
class Transpose:
"""Transpose some results by given keys.
Args:
keys (Sequence[str]): Keys of results to be transposed.
order (Sequence[int]): Order of transpose.
"""
def __init__(self, keys, order):
self.keys = keys
self.order = order
def __call__(self, results):
"""Call function to transpose the channel order of data in results.
Args:
results (dict): Result dict contains the data to transpose.
Returns:
dict: The result dict contains the data transposed to \
``self.order``.
"""
for key in self.keys:
results[key] = results[key].transpose(self.order)
return results
def __repr__(self):
return self.__class__.__name__ + \
f'(keys={self.keys}, order={self.order})'
@PIPELINES.register_module()
class ToDataContainer:
"""Convert results to :obj:`mmcv.DataContainer` by given fields.
Args:
fields (Sequence[dict]): Each field is a dict like
``dict(key='xxx', **kwargs)``. The ``key`` in result will
be converted to :obj:`mmcv.DataContainer` with ``**kwargs``.
Default: ``(dict(key='img', stack=True), dict(key='gt_bboxes'),
dict(key='gt_labels'))``.
"""
def __init__(self,
fields=(dict(key='img', stack=True), dict(key='gt_bboxes'),
dict(key='gt_labels'))):
|
def __call__(self, results):
"""Call function to convert data in results to
:obj:`mmcv.DataContainer`.
Args:
results (dict): Result dict contains the data to convert.
Returns:
dict: The result dict contains the data converted to \
:obj:`mmcv.DataContainer`.
"""
for field in self.fields:
field = field.copy()
key = field.pop('key')
results[key] = DC(results[key], **field)
return results
def __repr__(self):
return self.__class__.__name__ + f'(fields={self.fields})'
@PIPELINES.register_module()
class DefaultFormatBundle:
"""Default formatting bundle.
It simplifies the pipeline of formatting common fields, including "img",
"proposals", "gt_bboxes", "gt_labels", "gt_masks" and "gt_semantic_seg".
These fields are formatted as follows.
- img: (1)transpose, (2)to tensor, (3)to DataContainer (stack=True)
- proposals: (1)to tensor, (2)to DataContainer
- gt_bboxes: (1)to tensor, (2)to DataContainer
- gt_bboxes_ignore: (1)to tensor, (2)to DataContainer
- gt_labels: (1)to tensor, (2)to DataContainer
- gt_masks: (1)to tensor, (2)to DataContainer (cpu_only=True)
- gt_semantic_seg: (1)unsqueeze dim-0 (2)to tensor, \
(3)to DataContainer (stack=True)
"""
def __call__(self, results):
"""Call function to transform and format common fields in results.
Args:
results (dict): Result dict contains the data to convert.
Returns:
dict: The result dict contains the data that is formatted with \
default bundle.
"""
if 'img' in results:
img = results['img']
# add default meta keys
results = self._add_default_meta_keys(results)
if len(img.shape) < 3:
img = np.expand_dims(img, -1)
img = np.ascontiguousarray(img.transpose(2, 0, 1))
results['img'] = DC(to_tensor(img), stack=True)
for key in ['proposals', 'gt_bboxes', 'gt_bboxes_ignore', 'gt_labels']:
if key not in results:
continue
results[key] = DC(to_tensor(results[key]))
if 'gt_masks' in results:
results['gt_masks'] = DC(results['gt_masks'], cpu_only=True)
if 'gt_semantic_seg' in results:
results['gt_semantic_seg'] = DC(
to_tensor(results['gt_semantic_seg'][None, ...]), stack=True)
return results
def _add_default_meta_keys(self, results):
"""Add default meta keys.
We set default meta keys including `pad_shape`, `scale_factor` and
`img_norm_cfg` to avoid the case where no `Resize`, `Normalize` and
`Pad` are implemented during the whole pipeline.
Args:
results (dict): Result dict contains the data to convert.
Returns:
results (dict): Updated result dict contains the data to convert.
"""
img = results['img']
results.setdefault('pad_shape', img.shape)
results.setdefault('scale_factor', 1.0)
num_channels = 1 if len(img.shape) < 3 else img.shape[2]
results.setdefault(
'img_norm_cfg',
dict(
mean=np.zeros(num_channels, dtype=np.float32),
std=np.ones(num_channels, dtype=np.float32),
to_rgb=False))
return results
def __repr__(self):
return self.__class__.__name__
@PIPELINES.register_module()
class Collect:
"""Collect data from the loader relevant to the specific task.
This is usually the last stage of the data loader pipeline. Typically keys
is set to some subset of "img", "proposals", "gt_bboxes",
"gt_bboxes_ignore", "gt_labels", and/or "gt_masks".
The "img_meta" item is always populated. The contents of the "img_meta"
dictionary depends on "meta_keys". By default this includes:
- "img_shape": shape of the image input to the network as a tuple \
(h, w, c). Note that images may be zero padded on the \
bottom/right if the batch tensor is larger than this shape.
- "scale_factor": a float indicating the preprocessing scale
- "flip": a boolean indicating if image flip transform was used
- "filename": path to the image file
- "ori_shape": original shape of the image as a tuple (h, w, c)
- "pad_shape": image shape after padding
- "img_norm_cfg": a dict of normalization information:
- mean - per channel mean subtraction
- std - per channel std divisor
- to_rgb - bool indicating if bgr was converted to rgb
Args:
keys (Sequence[str]): Keys of results to be collected in ``data``.
meta_keys (Sequence[str], optional): Meta keys to be converted to
``mmcv.DataContainer`` and collected in ``data[img_metas]``.
Default: ``('filename', 'ori_filename', 'ori_shape', 'img_shape',
'pad_shape', 'scale_factor', 'flip', 'flip_direction',
'img_norm_cfg')``
"""
def __init__(self,
keys,
meta_keys=('filename', 'ori_filename', 'ori_shape',
'img_shape', 'pad_shape', 'scale_factor', 'flip',
'flip_direction', 'img_norm_cfg')):
self.keys = keys
self.meta_keys = meta_keys
def __call__(self, results):
"""Call function to collect keys in results. The keys in ``meta_keys``
will be converted to :obj:mmcv.DataContainer.
Args:
results (dict): Result dict contains the data to collect.
Returns:
dict: The result dict contains the following keys
- keys in``self.keys``
- ``img_metas``
"""
data = {}
img_meta = {}
for key in self.meta_keys:
img_meta[key] = results[key]
data['img_metas'] = DC(img_meta, cpu_only=True)
for key in self.keys:
data[key] = results[key]
return data
def __repr__(self):
return self.__class__.__name__ + \
f'(keys={self.keys}, meta_keys={self.meta_keys})'
@PIPELINES.register_module()
class WrapFieldsToLists:
"""Wrap fields of the data dictionary into lists for evaluation.
This class can be used as a last step of a test or validation
pipeline for single image evaluation or inference.
Example:
>>> test_pipeline = [
>>> dict(type='LoadImageFromFile'),
>>> dict(type='Normalize',
mean=[123.675, 116.28, 103.53],
std=[58.395, 57.12, 57.375],
to_rgb=True),
>>> dict(type='Pad', size_divisor=32),
>>> dict(type='ImageToTensor', keys=['img']),
>>> dict(type='Collect', keys=['img']),
>>> dict(type='WrapFieldsToLists')
>>> ]
"""
def __call__(self, results):
"""Call function to wrap fields into lists.
Args:
results (dict): Result dict contains the data to wrap.
Returns:
dict: The result dict where value of ``self.keys`` are wrapped \
into list.
"""
# Wrap dict fields into lists
for key, val in results.items():
results[key] = [val]
return results
def __repr__(self):
return f'{self.__class__.__name__}()'
| self.fields = fields |
routes.js | angular.module('app.routes', ['ionicUIRouter'])
.config(function($stateProvider, $urlRouterProvider) {
// Ionic uses AngularUI Router which uses the concept of states
// Learn more here: https://github.com/angular-ui/ui-router
// Set up the various states which the app can be in.
// Each state's controller can be found in controllers.js
$stateProvider
.state('tabsController.myInventory', {
url: '/page2',
views: {
'tab1': {
templateUrl: 'templates/myInventory.html',
controller: 'myInventoryCtrl'
}
}
})
.state('tabsController.requests', {
url: '/page3',
views: {
'tab2': {
templateUrl: 'templates/requests.html',
controller: 'requestsCtrl'
}
}
})
.state('tabsController.settings', {
url: '/page4',
views: {
'tab3': {
templateUrl: 'templates/settings.html',
controller: 'settingsCtrl'
}
}
})
.state('tabsController', {
url: '/page1',
templateUrl: 'templates/tabsController.html',
abstract:true
})
/*
The IonicUIRouter.js UI-Router Modification is being used for this route.
To navigate to this route, do NOT use a URL. Instead use one of the following:
1) Using the ui-sref HTML attribute:
ui-sref='tabsController.itemInfo' | 2) Using $state.go programmatically:
$state.go('tabsController.itemInfo');
This allows your app to figure out which Tab to open this page in on the fly.
If you're setting a Tabs default page or modifying the .otherwise for your app and
must use a URL, use one of the following:
/page1/tab1/page6
/page1/tab5/page6
*/
.state('tabsController.itemInfo', {
url: '/page6',
views: {
'tab1': {
templateUrl: 'templates/itemInfo.html',
controller: 'itemInfoCtrl'
},
'tab5': {
templateUrl: 'templates/itemInfo.html',
controller: 'itemInfoCtrl'
}
}
})
.state('tabsController.browse', {
url: '/page7',
views: {
'tab5': {
templateUrl: 'templates/browse.html',
controller: 'browseCtrl'
}
}
})
$urlRouterProvider.otherwise('/page1/page2')
}); | |
middle_square_weyl_sequence_benchmarks.rs | use criterion::{black_box, criterion_group, criterion_main, Criterion};
use random_numbers::prelude::*;
fn bench_msws_init_with_no_seed(c: &mut Criterion) {
c.bench_function("msws_init_with_no_seed", |b| b.iter(|| random_numbers::new(RandomNumberAlgorithm::MiddleSquareWeylSequence)));
}
fn bench_msws_init_with_seed(c: &mut Criterion) {
c.bench_function("msws_init_with_seed", |b| b.iter(|| random_numbers::from_seed(RandomNumberAlgorithm::MiddleSquareWeylSequence, black_box(0))));
}
fn | (c: &mut Criterion) {
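// Reuse a single seeded generator for all of the per-type benchmarks below.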
let mut rnd = random_numbers::from_seed(RandomNumberAlgorithm::MiddleSquareWeylSequence, black_box(0));
c.bench_function("msws_get_random_bools", |b| b.iter(|| rnd.next_bool()));
c.bench_function("msws_get_random_u8s", |b| b.iter(|| rnd.next_u8()));
c.bench_function("msws_get_random_i8s", |b| b.iter(|| rnd.next_i8()));
c.bench_function("msws_get_random_u16s", |b| b.iter(|| rnd.next_u16()));
c.bench_function("msws_get_random_i16s", |b| b.iter(|| rnd.next_i16()));
c.bench_function("msws_get_random_u32s", |b| b.iter(|| rnd.next_u32()));
c.bench_function("msws_get_random_i32s", |b| b.iter(|| rnd.next_i32()));
c.bench_function("msws_get_random_u64s", |b| b.iter(|| rnd.next_u64()));
c.bench_function("msws_get_random_i64s", |b| b.iter(|| rnd.next_i64()));
c.bench_function("msws_get_random_u128s", |b| b.iter(|| rnd.next_u128()));
c.bench_function("msws_get_random_i128s", |b| b.iter(|| rnd.next_i128()));
c.bench_function("msws_get_random_f32s", |b| b.iter(|| rnd.next_f32()));
c.bench_function("msws_get_random_f64s", |b| b.iter(|| rnd.next_f64()));
}
criterion_group!(init_msws_benches, bench_msws_init_with_no_seed, bench_msws_init_with_seed);
criterion_group!(generate_msws_benches, bench_msws_get_randoms);
criterion_main!(init_msws_benches, generate_msws_benches);
| bench_msws_get_randoms |
duration.py | """
sphinx.ext.duration
~~~~~~~~~~~~~~~~~~~
Measure durations of Sphinx processing.
:copyright: Copyright 2007-2021 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from datetime import datetime, timedelta
from itertools import islice
from operator import itemgetter
from typing import Any, Dict, List, cast
from docutils import nodes
from sphinx.application import Sphinx
from sphinx.domains import Domain
from sphinx.locale import __
from sphinx.util import logging
logger = logging.getLogger(__name__)
class DurationDomain(Domain):
"""A domain for durations of Sphinx processing."""
name = 'duration'
@property
def reading_durations(self) -> Dict[str, timedelta]:
return self.data.setdefault('reading_durations', {})
def note_reading_duration(self, duration: timedelta) -> None:
self.reading_durations[self.env.docname] = duration
def clear(self) -> None:
self.reading_durations.clear()
def clear_doc(self, docname: str) -> None:
self.reading_durations.pop(docname, None)
def merge_domaindata(self, docnames: List[str], otherdata: Dict[str, timedelta]) -> None:
for docname, duration in otherdata.items():
if docname in docnames:
self.reading_durations[docname] = duration
def on_builder_inited(app: Sphinx) -> None:
"""Initialize DurationDomain on bootstrap.
This clears results of last build.
"""
domain = cast(DurationDomain, app.env.get_domain('duration'))
domain.clear()
def | (app: Sphinx, docname: str, content: List[str]) -> None:
"""Start to measure reading duration."""
app.env.temp_data['started_at'] = datetime.now()
def on_doctree_read(app: Sphinx, doctree: nodes.document) -> None:
"""Record a reading duration."""
started_at = app.env.temp_data.get('started_at')
duration = datetime.now() - started_at
domain = cast(DurationDomain, app.env.get_domain('duration'))
domain.note_reading_duration(duration)
def on_build_finished(app: Sphinx, error: Exception) -> None:
"""Display duration ranking on current build."""
domain = cast(DurationDomain, app.env.get_domain('duration'))
durations = sorted(domain.reading_durations.items(), key=itemgetter(1), reverse=True)
if not durations:
return
logger.info('')
logger.info(__('====================== slowest reading durations ======================='))
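# Report only the five slowest documents.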
for docname, d in islice(durations, 5):
logger.info('%d.%03d %s', d.seconds, d.microseconds / 1000, docname)
def setup(app: Sphinx) -> Dict[str, Any]:
app.add_domain(DurationDomain)
app.connect('builder-inited', on_builder_inited)
app.connect('source-read', on_source_read)
app.connect('doctree-read', on_doctree_read)
app.connect('build-finished', on_build_finished)
return {
'version': 'builtin',
'parallel_read_safe': True,
'parallel_write_safe': True,
}
| on_source_read |
UserLayout.js | import React, { Component } from 'react';
import { Row, Col } from 'antd';
import { connect } from 'dva';
import PropTypes from 'prop-types';
/** */
class | extends Component {
/** Lifecycle hook: runs after the component mounts */
componentDidMount() {
}
/** Render the component */
render() {
const { children } = this.props;
return (
<Row>
<div style={{ position: 'absolute', zIndex: -10 }}>
<img src="./bg.svg" alt="" style={{ height: '100vh', width: '100%' }} />
</div>
<Col
span={6}
offset={9}
style={{ overflow: 'hidden', height: '100vh', paddingTop: 50 }}
>
{children}
</Col>
</Row>
);
}
}
UserLayout.defaultProps = { children: '' };
UserLayout.propTypes = { children: PropTypes.any };
export default connect(({ menu: menuModel }) => ({
menuData: menuModel.menuData,
breadcrumbNameMap: menuModel.breadcrumbNameMap,
}))(UserLayout);
| UserLayout |
intravisit.rs | // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! HIR walker for walking the contents of nodes.
//!
//! **For an overview of the visitor strategy, see the docs on the
//! `super::itemlikevisit::ItemLikeVisitor` trait.**
//!
//! If you have decided to use this visitor, here are some general
//! notes on how to do it:
//!
//! Each overridden visit method has full control over what
//! happens with its node, it can do its own traversal of the node's children,
//! call `intravisit::walk_*` to apply the default traversal algorithm, or prevent
//! deeper traversal by doing nothing.
//!
//! When visiting the HIR, the contents of nested items are NOT visited
//! by default. This is different from the AST visitor, which does a deep walk.
//! Hence this module is called `intravisit`; see the method `visit_nested_item`
//! for more details.
//!
//! Note: it is an important invariant that the default visitor walks
//! the body of a function in "execution order" - more concretely, if
//! we consider the reverse post-order (RPO) of the CFG implied by the HIR,
//! then a pre-order traversal of the HIR is consistent with the CFG RPO
//! on the *initial CFG point* of each HIR node, while a post-order traversal
//! of the HIR is consistent with the CFG RPO on each *final CFG point* of
//! each CFG node.
//!
//! One thing that follows is that if HIR node A always starts/ends executing
//! before HIR node B, then A appears in traversal pre/postorder before B,
//! respectively. (This follows from RPO respecting CFG domination).
//!
//! This order consistency is required in a few places in rustc, for
//! example generator inference, and possibly also HIR borrowck.
use syntax::ast::{NodeId, CRATE_NODE_ID, Ident, Name, Attribute};
use syntax_pos::Span;
use hir::*;
use hir::def::Def;
use hir::map::{self, Map};
use super::itemlikevisit::DeepVisitor;
use std::cmp;
use std::u32;
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum FnKind<'a> {
/// #[xxx] pub async/const/extern "Abi" fn foo()
ItemFn(Name, &'a Generics, FnHeader, &'a Visibility, &'a [Attribute]),
/// fn foo(&self)
Method(Ident, &'a MethodSig, Option<&'a Visibility>, &'a [Attribute]),
/// |x, y| {}
Closure(&'a [Attribute]),
}
impl<'a> FnKind<'a> {
pub fn attrs(&self) -> &'a [Attribute] {
match *self {
FnKind::ItemFn(.., attrs) => attrs,
FnKind::Method(.., attrs) => attrs,
FnKind::Closure(attrs) => attrs,
}
}
}
/// Specifies what nested things a visitor wants to visit. The most
/// common choice is `OnlyBodies`, which will cause the visitor to
/// visit fn bodies for fns that it encounters, but skip over nested
/// item-like things.
///
/// See the comments on `ItemLikeVisitor` for more details on the overall
/// visit strategy.
pub enum NestedVisitorMap<'this, 'tcx: 'this> {
/// Do not visit any nested things. When you add a new
/// "non-nested" thing, you will want to audit such uses to see if
/// they remain valid.
///
/// Use this if you are only walking some particular kind of tree
/// (i.e., a type, or fn signature) and you don't want to thread a
/// HIR map around.
None,
/// Do not visit nested item-like things, but visit nested things
/// that are inside of an item-like.
///
/// **This is the most common choice.** A very common pattern is
/// to use `visit_all_item_likes()` as an outer loop,
/// and to have the visitor that visits the contents of each item
/// using this setting.
OnlyBodies(&'this Map<'tcx>),
/// Visit all nested things, including item-likes.
///
/// **This is an unusual choice.** It is used when you want to
/// process everything within their lexical context. Typically you
/// kick off the visit by doing `walk_krate()`.
All(&'this Map<'tcx>),
}
impl<'this, 'tcx> NestedVisitorMap<'this, 'tcx> {
/// Returns the map to use for an "intra item-like" thing (if any).
/// e.g., function body.
pub fn intra(self) -> Option<&'this Map<'tcx>> {
match self {
NestedVisitorMap::None => None,
NestedVisitorMap::OnlyBodies(map) => Some(map),
NestedVisitorMap::All(map) => Some(map),
}
}
/// Returns the map to use for an "item-like" thing (if any).
/// e.g., item, impl-item.
pub fn inter(self) -> Option<&'this Map<'tcx>> {
match self {
NestedVisitorMap::None => None,
NestedVisitorMap::OnlyBodies(_) => None,
NestedVisitorMap::All(map) => Some(map),
}
}
}
/// Each method of the Visitor trait is a hook to be potentially
/// overridden. Each method's default implementation recursively visits
/// the substructure of the input via the corresponding `walk` method;
/// e.g. the `visit_mod` method by default calls `intravisit::walk_mod`.
///
/// Note that this visitor does NOT visit nested items by default
/// (this is why the module is called `intravisit`, to distinguish it
/// from the AST's `visit` module, which acts differently). If you
/// simply want to visit all items in the crate in some order, you
/// should call `Crate::visit_all_items`. Otherwise, see the comment
/// on `visit_nested_item` for details on how to visit nested items.
///
/// If you want to ensure that your code handles every variant
/// explicitly, you need to override each method. (And you also need
/// to monitor future changes to `Visitor` in case a new method with a
/// new default implementation gets introduced.)
pub trait Visitor<'v> : Sized {
///////////////////////////////////////////////////////////////////////////
// Nested items.
/// The default versions of the `visit_nested_XXX` routines invoke
/// this method to get a map to use. By selecting an enum variant,
/// you control which kinds of nested HIR are visited; see
/// `NestedVisitorMap` for details. By "nested HIR", we are
/// referring to bits of HIR that are not directly embedded within
/// one another but rather indirectly, through a table in the
/// crate. This is done to control dependencies during incremental
/// compilation: the non-inline bits of HIR can be tracked and
/// hashed separately.
///
/// **If for some reason you want the nested behavior, but don't
/// have a `Map` at your disposal:** then you should override the
/// `visit_nested_XXX` methods, and override this method to
/// `panic!()`. This way, if a new `visit_nested_XXX` variant is
/// added in the future, we will see the panic in your code and
/// fix it appropriately.
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'v>;
/// Invoked when a nested item is encountered. By default does
/// nothing unless you override `nested_visit_map` to return
/// `Some(_)`, in which case it will walk the item. **You probably
/// don't want to override this method** -- instead, override
/// `nested_visit_map` or use the "shallow" or "deep" visit
/// patterns described on `itemlikevisit::ItemLikeVisitor`. The only
/// reason to override this method is if you want a nested pattern
/// but cannot supply a `Map`; see `nested_visit_map` for advice.
#[allow(unused_variables)]
fn visit_nested_item(&mut self, id: ItemId) {
let opt_item = self.nested_visit_map().inter().map(|map| map.expect_item(id.id));
if let Some(item) = opt_item {
self.visit_item(item);
}
}
/// Like `visit_nested_item()`, but for trait items. See
/// `visit_nested_item()` for advice on when to override this
/// method.
#[allow(unused_variables)]
fn visit_nested_trait_item(&mut self, id: TraitItemId) {
let opt_item = self.nested_visit_map().inter().map(|map| map.trait_item(id));
if let Some(item) = opt_item {
self.visit_trait_item(item);
}
}
/// Like `visit_nested_item()`, but for impl items. See
/// `visit_nested_item()` for advice on when to override this
/// method.
#[allow(unused_variables)]
fn visit_nested_impl_item(&mut self, id: ImplItemId) {
let opt_item = self.nested_visit_map().inter().map(|map| map.impl_item(id));
if let Some(item) = opt_item {
self.visit_impl_item(item);
}
}
/// Invoked to visit the body of a function, method or closure. Like
/// visit_nested_item, does nothing by default unless you override
/// `nested_visit_map` to return `Some(_)`, in which case it will walk the
/// body.
fn visit_nested_body(&mut self, id: BodyId) {
let opt_body = self.nested_visit_map().intra().map(|map| map.body(id));
if let Some(body) = opt_body {
self.visit_body(body);
}
}
/// Visit the top-level item and (optionally) nested items / impl items. See
/// `visit_nested_item` for details.
fn visit_item(&mut self, i: &'v Item) {
walk_item(self, i)
}
fn visit_body(&mut self, b: &'v Body) {
walk_body(self, b);
}
/// When invoking `visit_all_item_likes()`, you need to supply an
/// item-like visitor. This method converts a "intra-visit"
/// visitor into an item-like visitor that walks the entire tree.
/// If you use this, you probably don't want to process the
/// contents of nested item-like things, since the outer loop will
/// visit them as well.
fn as_deep_visitor<'s>(&'s mut self) -> DeepVisitor<'s, Self> {
DeepVisitor::new(self)
}
///////////////////////////////////////////////////////////////////////////
fn visit_id(&mut self, _node_id: NodeId) {
// Nothing to do.
}
fn visit_def_mention(&mut self, _def: Def) {
// Nothing to do.
}
fn visit_name(&mut self, _span: Span, _name: Name) {
// Nothing to do.
}
fn visit_ident(&mut self, ident: Ident) {
walk_ident(self, ident)
}
fn visit_mod(&mut self, m: &'v Mod, _s: Span, n: NodeId) {
walk_mod(self, m, n)
}
fn visit_foreign_item(&mut self, i: &'v ForeignItem) {
walk_foreign_item(self, i)
}
fn visit_local(&mut self, l: &'v Local) {
walk_local(self, l)
}
fn visit_block(&mut self, b: &'v Block) {
walk_block(self, b)
}
fn visit_stmt(&mut self, s: &'v Stmt) {
walk_stmt(self, s)
}
fn visit_arm(&mut self, a: &'v Arm) {
walk_arm(self, a)
}
fn visit_pat(&mut self, p: &'v Pat) {
walk_pat(self, p)
}
fn visit_decl(&mut self, d: &'v Decl) {
walk_decl(self, d)
}
fn visit_anon_const(&mut self, c: &'v AnonConst) {
walk_anon_const(self, c)
}
fn visit_expr(&mut self, ex: &'v Expr) {
walk_expr(self, ex)
}
fn visit_ty(&mut self, t: &'v Ty) {
walk_ty(self, t)
}
fn visit_generic_param(&mut self, p: &'v GenericParam) {
walk_generic_param(self, p)
}
fn visit_generics(&mut self, g: &'v Generics) {
walk_generics(self, g)
}
fn visit_where_predicate(&mut self, predicate: &'v WherePredicate) {
walk_where_predicate(self, predicate)
}
fn visit_fn_decl(&mut self, fd: &'v FnDecl) {
walk_fn_decl(self, fd)
}
fn visit_fn(&mut self, fk: FnKind<'v>, fd: &'v FnDecl, b: BodyId, s: Span, id: NodeId) {
walk_fn(self, fk, fd, b, s, id)
}
fn visit_trait_item(&mut self, ti: &'v TraitItem) {
walk_trait_item(self, ti)
}
fn visit_trait_item_ref(&mut self, ii: &'v TraitItemRef) {
walk_trait_item_ref(self, ii)
}
fn visit_impl_item(&mut self, ii: &'v ImplItem) {
walk_impl_item(self, ii)
}
fn visit_impl_item_ref(&mut self, ii: &'v ImplItemRef) {
walk_impl_item_ref(self, ii)
}
fn visit_trait_ref(&mut self, t: &'v TraitRef) {
walk_trait_ref(self, t)
}
fn visit_param_bound(&mut self, bounds: &'v GenericBound) {
walk_param_bound(self, bounds)
}
fn visit_poly_trait_ref(&mut self, t: &'v PolyTraitRef, m: TraitBoundModifier) {
walk_poly_trait_ref(self, t, m)
}
fn visit_variant_data(&mut self,
s: &'v VariantData,
_: Name,
_: &'v Generics,
_parent_id: NodeId,
_: Span) {
walk_struct_def(self, s)
}
fn visit_struct_field(&mut self, s: &'v StructField) {
walk_struct_field(self, s)
}
fn visit_enum_def(&mut self,
enum_definition: &'v EnumDef,
generics: &'v Generics,
item_id: NodeId,
_: Span) {
walk_enum_def(self, enum_definition, generics, item_id)
}
fn visit_variant(&mut self, v: &'v Variant, g: &'v Generics, item_id: NodeId) {
walk_variant(self, v, g, item_id)
}
fn visit_label(&mut self, label: &'v Label) {
walk_label(self, label)
}
fn visit_generic_arg(&mut self, generic_arg: &'v GenericArg) {
match generic_arg {
GenericArg::Lifetime(lt) => self.visit_lifetime(lt),
GenericArg::Type(ty) => self.visit_ty(ty),
}
}
fn visit_lifetime(&mut self, lifetime: &'v Lifetime) {
walk_lifetime(self, lifetime)
}
fn visit_qpath(&mut self, qpath: &'v QPath, id: NodeId, span: Span) {
walk_qpath(self, qpath, id, span)
}
fn visit_path(&mut self, path: &'v Path, _id: NodeId) {
walk_path(self, path)
}
fn visit_path_segment(&mut self, path_span: Span, path_segment: &'v PathSegment) {
walk_path_segment(self, path_span, path_segment)
}
fn visit_generic_args(&mut self, path_span: Span, generic_args: &'v GenericArgs) {
walk_generic_args(self, path_span, generic_args)
}
fn visit_assoc_type_binding(&mut self, type_binding: &'v TypeBinding) {
walk_assoc_type_binding(self, type_binding)
}
fn visit_attribute(&mut self, _attr: &'v Attribute) {
}
fn visit_macro_def(&mut self, macro_def: &'v MacroDef) {
walk_macro_def(self, macro_def)
}
fn visit_vis(&mut self, vis: &'v Visibility) {
walk_vis(self, vis)
}
fn visit_associated_item_kind(&mut self, kind: &'v AssociatedItemKind) {
walk_associated_item_kind(self, kind);
}
fn visit_defaultness(&mut self, defaultness: &'v Defaultness) {
walk_defaultness(self, defaultness);
}
}
/// Walks the contents of a crate. See also `Crate::visit_all_items`.
pub fn walk_crate<'v, V: Visitor<'v>>(visitor: &mut V, krate: &'v Crate) {
visitor.visit_mod(&krate.module, krate.span, CRATE_NODE_ID);
walk_list!(visitor, visit_attribute, &krate.attrs);
walk_list!(visitor, visit_macro_def, &krate.exported_macros);
}
pub fn walk_macro_def<'v, V: Visitor<'v>>(visitor: &mut V, macro_def: &'v MacroDef) {
visitor.visit_id(macro_def.id);
visitor.visit_name(macro_def.span, macro_def.name);
walk_list!(visitor, visit_attribute, &macro_def.attrs);
}
pub fn walk_mod<'v, V: Visitor<'v>>(visitor: &mut V, module: &'v Mod, mod_node_id: NodeId) {
visitor.visit_id(mod_node_id);
for &item_id in &module.item_ids {
visitor.visit_nested_item(item_id);
}
}
pub fn walk_body<'v, V: Visitor<'v>>(visitor: &mut V, body: &'v Body) {
for argument in &body.arguments {
visitor.visit_id(argument.id);
visitor.visit_pat(&argument.pat);
}
visitor.visit_expr(&body.value);
}
pub fn walk_local<'v, V: Visitor<'v>>(visitor: &mut V, local: &'v Local) {
// Intentionally visiting the expr first - the initialization expr
// dominates the local's definition.
walk_list!(visitor, visit_expr, &local.init);
walk_list!(visitor, visit_attribute, local.attrs.iter());
visitor.visit_id(local.id);
visitor.visit_pat(&local.pat);
walk_list!(visitor, visit_ty, &local.ty);
}
pub fn walk_ident<'v, V: Visitor<'v>>(visitor: &mut V, ident: Ident) {
visitor.visit_name(ident.span, ident.name);
}
pub fn walk_label<'v, V: Visitor<'v>>(visitor: &mut V, label: &'v Label) {
visitor.visit_ident(label.ident);
}
pub fn walk_lifetime<'v, V: Visitor<'v>>(visitor: &mut V, lifetime: &'v Lifetime) {
visitor.visit_id(lifetime.id);
match lifetime.name {
LifetimeName::Param(ParamName::Plain(ident)) => {
visitor.visit_ident(ident);
}
LifetimeName::Param(ParamName::Fresh(_)) |
LifetimeName::Static |
LifetimeName::Implicit |
LifetimeName::Underscore => {}
}
}
pub fn walk_poly_trait_ref<'v, V>(visitor: &mut V,
trait_ref: &'v PolyTraitRef,
_modifier: TraitBoundModifier)
where V: Visitor<'v>
{
walk_list!(visitor, visit_generic_param, &trait_ref.bound_generic_params);
visitor.visit_trait_ref(&trait_ref.trait_ref);
}
pub fn walk_trait_ref<'v, V>(visitor: &mut V, trait_ref: &'v TraitRef)
where V: Visitor<'v>
{
visitor.visit_id(trait_ref.ref_id);
visitor.visit_path(&trait_ref.path, trait_ref.ref_id)
}
pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) {
visitor.visit_vis(&item.vis);
visitor.visit_name(item.span, item.name);
match item.node {
ItemExternCrate(orig_name) => {
visitor.visit_id(item.id);
if let Some(orig_name) = orig_name {
visitor.visit_name(item.span, orig_name);
}
}
ItemUse(ref path, _) => {
visitor.visit_id(item.id);
visitor.visit_path(path, item.id);
}
ItemStatic(ref typ, _, body) |
ItemConst(ref typ, body) => {
visitor.visit_id(item.id);
visitor.visit_ty(typ);
visitor.visit_nested_body(body);
}
ItemFn(ref declaration, header, ref generics, body_id) => {
visitor.visit_fn(FnKind::ItemFn(item.name,
generics,
header,
&item.vis,
&item.attrs),
declaration,
body_id,
item.span,
item.id)
}
ItemMod(ref module) => {
// visit_mod() takes care of visiting the Item's NodeId
visitor.visit_mod(module, item.span, item.id)
}
ItemForeignMod(ref foreign_module) => {
visitor.visit_id(item.id);
walk_list!(visitor, visit_foreign_item, &foreign_module.items);
}
ItemGlobalAsm(_) => {
visitor.visit_id(item.id);
}
ItemTy(ref typ, ref type_parameters) => {
visitor.visit_id(item.id);
visitor.visit_ty(typ);
visitor.visit_generics(type_parameters)
}
ItemExistential(ExistTy {ref generics, ref bounds, impl_trait_fn}) => {
visitor.visit_id(item.id);
walk_generics(visitor, generics);
walk_list!(visitor, visit_param_bound, bounds);
if let Some(impl_trait_fn) = impl_trait_fn {
visitor.visit_def_mention(Def::Fn(impl_trait_fn))
}
}
ItemEnum(ref enum_definition, ref type_parameters) => {
visitor.visit_generics(type_parameters);
// visit_enum_def() takes care of visiting the Item's NodeId
visitor.visit_enum_def(enum_definition, type_parameters, item.id, item.span)
}
ItemImpl(.., ref type_parameters, ref opt_trait_reference, ref typ, ref impl_item_refs) => {
visitor.visit_id(item.id);
visitor.visit_generics(type_parameters);
walk_list!(visitor, visit_trait_ref, opt_trait_reference);
visitor.visit_ty(typ);
walk_list!(visitor, visit_impl_item_ref, impl_item_refs);
}
ItemStruct(ref struct_definition, ref generics) |
ItemUnion(ref struct_definition, ref generics) => {
visitor.visit_generics(generics);
visitor.visit_id(item.id);
visitor.visit_variant_data(struct_definition, item.name, generics, item.id, item.span);
}
ItemTrait(.., ref generics, ref bounds, ref trait_item_refs) => {
visitor.visit_id(item.id);
visitor.visit_generics(generics);
walk_list!(visitor, visit_param_bound, bounds);
walk_list!(visitor, visit_trait_item_ref, trait_item_refs);
}
ItemTraitAlias(ref generics, ref bounds) => {
visitor.visit_id(item.id);
visitor.visit_generics(generics);
walk_list!(visitor, visit_param_bound, bounds);
}
}
walk_list!(visitor, visit_attribute, &item.attrs);
}
pub fn walk_enum_def<'v, V: Visitor<'v>>(visitor: &mut V,
enum_definition: &'v EnumDef,
generics: &'v Generics,
item_id: NodeId) {
visitor.visit_id(item_id);
walk_list!(visitor,
visit_variant,
&enum_definition.variants,
generics,
item_id);
}
pub fn walk_variant<'v, V: Visitor<'v>>(visitor: &mut V,
variant: &'v Variant,
generics: &'v Generics,
parent_item_id: NodeId) {
visitor.visit_name(variant.span, variant.node.name);
visitor.visit_variant_data(&variant.node.data,
variant.node.name,
generics,
parent_item_id,
variant.span);
walk_list!(visitor, visit_anon_const, &variant.node.disr_expr);
walk_list!(visitor, visit_attribute, &variant.node.attrs);
}
pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty) {
visitor.visit_id(typ.id);
match typ.node {
TySlice(ref ty) => {
visitor.visit_ty(ty)
}
TyPtr(ref mutable_type) => {
visitor.visit_ty(&mutable_type.ty)
}
TyRptr(ref lifetime, ref mutable_type) => {
visitor.visit_lifetime(lifetime);
visitor.visit_ty(&mutable_type.ty)
}
TyNever => {},
TyTup(ref tuple_element_types) => {
walk_list!(visitor, visit_ty, tuple_element_types);
}
TyBareFn(ref function_declaration) => {
walk_list!(visitor, visit_generic_param, &function_declaration.generic_params);
visitor.visit_fn_decl(&function_declaration.decl);
}
TyPath(ref qpath) => {
visitor.visit_qpath(qpath, typ.id, typ.span);
}
TyArray(ref ty, ref length) => {
visitor.visit_ty(ty);
visitor.visit_anon_const(length)
}
TyTraitObject(ref bounds, ref lifetime) => {
for bound in bounds {
visitor.visit_poly_trait_ref(bound, TraitBoundModifier::None);
}
visitor.visit_lifetime(lifetime);
}
TyImplTraitExistential(item_id, def_id, ref lifetimes) => {
visitor.visit_def_mention(Def::Existential(def_id));
visitor.visit_nested_item(item_id);
walk_list!(visitor, visit_lifetime, lifetimes);
}
TyTypeof(ref expression) => {
visitor.visit_anon_const(expression)
}
TyInfer | TyErr => {}
}
}
pub fn walk_qpath<'v, V: Visitor<'v>>(visitor: &mut V, qpath: &'v QPath, id: NodeId, span: Span) {
match *qpath {
QPath::Resolved(ref maybe_qself, ref path) => {
if let Some(ref qself) = *maybe_qself {
visitor.visit_ty(qself);
}
visitor.visit_path(path, id)
}
QPath::TypeRelative(ref qself, ref segment) => {
visitor.visit_ty(qself);
visitor.visit_path_segment(span, segment);
}
}
}
pub fn walk_path<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v Path) {
visitor.visit_def_mention(path.def);
for segment in &path.segments {
visitor.visit_path_segment(path.span, segment);
}
}
pub fn walk_path_segment<'v, V: Visitor<'v>>(visitor: &mut V,
path_span: Span,
segment: &'v PathSegment) {
visitor.visit_ident(segment.ident);
if let Some(ref args) = segment.args {
visitor.visit_generic_args(path_span, args);
}
}
pub fn walk_generic_args<'v, V: Visitor<'v>>(visitor: &mut V,
_path_span: Span,
generic_args: &'v GenericArgs) |
pub fn walk_assoc_type_binding<'v, V: Visitor<'v>>(visitor: &mut V,
type_binding: &'v TypeBinding) {
visitor.visit_id(type_binding.id);
visitor.visit_ident(type_binding.ident);
visitor.visit_ty(&type_binding.ty);
}
pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) {
visitor.visit_id(pattern.id);
match pattern.node {
PatKind::TupleStruct(ref qpath, ref children, _) => {
visitor.visit_qpath(qpath, pattern.id, pattern.span);
walk_list!(visitor, visit_pat, children);
}
PatKind::Path(ref qpath) => {
visitor.visit_qpath(qpath, pattern.id, pattern.span);
}
PatKind::Struct(ref qpath, ref fields, _) => {
visitor.visit_qpath(qpath, pattern.id, pattern.span);
for field in fields {
visitor.visit_id(field.node.id);
visitor.visit_ident(field.node.ident);
visitor.visit_pat(&field.node.pat)
}
}
PatKind::Tuple(ref tuple_elements, _) => {
walk_list!(visitor, visit_pat, tuple_elements);
}
PatKind::Box(ref subpattern) |
PatKind::Ref(ref subpattern, _) => {
visitor.visit_pat(subpattern)
}
PatKind::Binding(_, canonical_id, ident, ref optional_subpattern) => {
visitor.visit_def_mention(Def::Local(canonical_id));
visitor.visit_ident(ident);
walk_list!(visitor, visit_pat, optional_subpattern);
}
PatKind::Lit(ref expression) => visitor.visit_expr(expression),
PatKind::Range(ref lower_bound, ref upper_bound, _) => {
visitor.visit_expr(lower_bound);
visitor.visit_expr(upper_bound)
}
PatKind::Wild => (),
PatKind::Slice(ref prepatterns, ref slice_pattern, ref postpatterns) => {
walk_list!(visitor, visit_pat, prepatterns);
walk_list!(visitor, visit_pat, slice_pattern);
walk_list!(visitor, visit_pat, postpatterns);
}
}
}
pub fn walk_foreign_item<'v, V: Visitor<'v>>(visitor: &mut V, foreign_item: &'v ForeignItem) {
visitor.visit_id(foreign_item.id);
visitor.visit_vis(&foreign_item.vis);
visitor.visit_name(foreign_item.span, foreign_item.name);
match foreign_item.node {
ForeignItemFn(ref function_declaration, ref param_names, ref generics) => {
visitor.visit_generics(generics);
visitor.visit_fn_decl(function_declaration);
for &param_name in param_names {
visitor.visit_ident(param_name);
}
}
ForeignItemStatic(ref typ, _) => visitor.visit_ty(typ),
ForeignItemType => (),
}
walk_list!(visitor, visit_attribute, &foreign_item.attrs);
}
pub fn walk_param_bound<'v, V: Visitor<'v>>(visitor: &mut V, bound: &'v GenericBound) {
match *bound {
GenericBound::Trait(ref typ, modifier) => {
visitor.visit_poly_trait_ref(typ, modifier);
}
GenericBound::Outlives(ref lifetime) => visitor.visit_lifetime(lifetime),
}
}
pub fn walk_generic_param<'v, V: Visitor<'v>>(visitor: &mut V, param: &'v GenericParam) {
visitor.visit_id(param.id);
walk_list!(visitor, visit_attribute, &param.attrs);
match param.name {
ParamName::Plain(ident) => visitor.visit_ident(ident),
ParamName::Fresh(_) => {}
}
match param.kind {
GenericParamKind::Lifetime { .. } => {}
GenericParamKind::Type { ref default, .. } => walk_list!(visitor, visit_ty, default),
}
walk_list!(visitor, visit_param_bound, &param.bounds);
}
pub fn walk_generics<'v, V: Visitor<'v>>(visitor: &mut V, generics: &'v Generics) {
walk_list!(visitor, visit_generic_param, &generics.params);
visitor.visit_id(generics.where_clause.id);
walk_list!(visitor, visit_where_predicate, &generics.where_clause.predicates);
}
pub fn walk_where_predicate<'v, V: Visitor<'v>>(
visitor: &mut V,
predicate: &'v WherePredicate)
{
match predicate {
&WherePredicate::BoundPredicate(WhereBoundPredicate{ref bounded_ty,
ref bounds,
ref bound_generic_params,
..}) => {
visitor.visit_ty(bounded_ty);
walk_list!(visitor, visit_param_bound, bounds);
walk_list!(visitor, visit_generic_param, bound_generic_params);
}
&WherePredicate::RegionPredicate(WhereRegionPredicate{ref lifetime,
ref bounds,
..}) => {
visitor.visit_lifetime(lifetime);
walk_list!(visitor, visit_param_bound, bounds);
}
&WherePredicate::EqPredicate(WhereEqPredicate{id,
ref lhs_ty,
ref rhs_ty,
..}) => {
visitor.visit_id(id);
visitor.visit_ty(lhs_ty);
visitor.visit_ty(rhs_ty);
}
}
}
pub fn walk_fn_ret_ty<'v, V: Visitor<'v>>(visitor: &mut V, ret_ty: &'v FunctionRetTy) {
if let Return(ref output_ty) = *ret_ty {
visitor.visit_ty(output_ty)
}
}
pub fn walk_fn_decl<'v, V: Visitor<'v>>(visitor: &mut V, function_declaration: &'v FnDecl) {
for ty in &function_declaration.inputs {
visitor.visit_ty(ty)
}
walk_fn_ret_ty(visitor, &function_declaration.output)
}
pub fn walk_fn_kind<'v, V: Visitor<'v>>(visitor: &mut V, function_kind: FnKind<'v>) {
match function_kind {
FnKind::ItemFn(_, generics, ..) => {
visitor.visit_generics(generics);
}
FnKind::Method(..) |
FnKind::Closure(_) => {}
}
}
pub fn walk_fn<'v, V: Visitor<'v>>(visitor: &mut V,
function_kind: FnKind<'v>,
function_declaration: &'v FnDecl,
body_id: BodyId,
_span: Span,
id: NodeId) {
visitor.visit_id(id);
visitor.visit_fn_decl(function_declaration);
walk_fn_kind(visitor, function_kind);
visitor.visit_nested_body(body_id)
}
pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v TraitItem) {
visitor.visit_ident(trait_item.ident);
walk_list!(visitor, visit_attribute, &trait_item.attrs);
visitor.visit_generics(&trait_item.generics);
match trait_item.node {
TraitItemKind::Const(ref ty, default) => {
visitor.visit_id(trait_item.id);
visitor.visit_ty(ty);
walk_list!(visitor, visit_nested_body, default);
}
TraitItemKind::Method(ref sig, TraitMethod::Required(ref param_names)) => {
visitor.visit_id(trait_item.id);
visitor.visit_fn_decl(&sig.decl);
for &param_name in param_names {
visitor.visit_ident(param_name);
}
}
TraitItemKind::Method(ref sig, TraitMethod::Provided(body_id)) => {
visitor.visit_fn(FnKind::Method(trait_item.ident,
sig,
None,
&trait_item.attrs),
&sig.decl,
body_id,
trait_item.span,
trait_item.id);
}
TraitItemKind::Type(ref bounds, ref default) => {
visitor.visit_id(trait_item.id);
walk_list!(visitor, visit_param_bound, bounds);
walk_list!(visitor, visit_ty, default);
}
}
}
pub fn walk_trait_item_ref<'v, V: Visitor<'v>>(visitor: &mut V, trait_item_ref: &'v TraitItemRef) {
// NB: Deliberately force a compilation error if/when new fields are added.
let TraitItemRef { id, ident, ref kind, span: _, ref defaultness } = *trait_item_ref;
visitor.visit_nested_trait_item(id);
visitor.visit_ident(ident);
visitor.visit_associated_item_kind(kind);
visitor.visit_defaultness(defaultness);
}
pub fn walk_impl_item<'v, V: Visitor<'v>>(visitor: &mut V, impl_item: &'v ImplItem) {
// NB: Deliberately force a compilation error if/when new fields are added.
let ImplItem {
id: _,
hir_id: _,
ident,
ref vis,
ref defaultness,
ref attrs,
ref generics,
ref node,
span: _,
} = *impl_item;
visitor.visit_ident(ident);
visitor.visit_vis(vis);
visitor.visit_defaultness(defaultness);
walk_list!(visitor, visit_attribute, attrs);
visitor.visit_generics(generics);
match *node {
ImplItemKind::Const(ref ty, body) => {
visitor.visit_id(impl_item.id);
visitor.visit_ty(ty);
visitor.visit_nested_body(body);
}
ImplItemKind::Method(ref sig, body_id) => {
visitor.visit_fn(FnKind::Method(impl_item.ident,
sig,
Some(&impl_item.vis),
&impl_item.attrs),
&sig.decl,
body_id,
impl_item.span,
impl_item.id);
}
ImplItemKind::Type(ref ty) => {
visitor.visit_id(impl_item.id);
visitor.visit_ty(ty);
}
}
}
pub fn walk_impl_item_ref<'v, V: Visitor<'v>>(visitor: &mut V, impl_item_ref: &'v ImplItemRef) {
// NB: Deliberately force a compilation error if/when new fields are added.
let ImplItemRef { id, ident, ref kind, span: _, ref vis, ref defaultness } = *impl_item_ref;
visitor.visit_nested_impl_item(id);
visitor.visit_ident(ident);
visitor.visit_associated_item_kind(kind);
visitor.visit_vis(vis);
visitor.visit_defaultness(defaultness);
}
pub fn walk_struct_def<'v, V: Visitor<'v>>(visitor: &mut V, struct_definition: &'v VariantData) {
visitor.visit_id(struct_definition.id());
walk_list!(visitor, visit_struct_field, struct_definition.fields());
}
pub fn walk_struct_field<'v, V: Visitor<'v>>(visitor: &mut V, struct_field: &'v StructField) {
visitor.visit_id(struct_field.id);
visitor.visit_vis(&struct_field.vis);
visitor.visit_ident(struct_field.ident);
visitor.visit_ty(&struct_field.ty);
walk_list!(visitor, visit_attribute, &struct_field.attrs);
}
pub fn walk_block<'v, V: Visitor<'v>>(visitor: &mut V, block: &'v Block) {
visitor.visit_id(block.id);
walk_list!(visitor, visit_stmt, &block.stmts);
walk_list!(visitor, visit_expr, &block.expr);
}
pub fn walk_stmt<'v, V: Visitor<'v>>(visitor: &mut V, statement: &'v Stmt) {
match statement.node {
StmtDecl(ref declaration, id) => {
visitor.visit_id(id);
visitor.visit_decl(declaration)
}
StmtExpr(ref expression, id) |
StmtSemi(ref expression, id) => {
visitor.visit_id(id);
visitor.visit_expr(expression)
}
}
}
pub fn walk_decl<'v, V: Visitor<'v>>(visitor: &mut V, declaration: &'v Decl) {
match declaration.node {
DeclLocal(ref local) => visitor.visit_local(local),
DeclItem(item) => visitor.visit_nested_item(item),
}
}
pub fn walk_anon_const<'v, V: Visitor<'v>>(visitor: &mut V, constant: &'v AnonConst) {
visitor.visit_id(constant.id);
visitor.visit_nested_body(constant.body);
}
pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) {
visitor.visit_id(expression.id);
walk_list!(visitor, visit_attribute, expression.attrs.iter());
match expression.node {
ExprBox(ref subexpression) => {
visitor.visit_expr(subexpression)
}
ExprArray(ref subexpressions) => {
walk_list!(visitor, visit_expr, subexpressions);
}
ExprRepeat(ref element, ref count) => {
visitor.visit_expr(element);
visitor.visit_anon_const(count)
}
ExprStruct(ref qpath, ref fields, ref optional_base) => {
visitor.visit_qpath(qpath, expression.id, expression.span);
for field in fields {
visitor.visit_id(field.id);
visitor.visit_ident(field.ident);
visitor.visit_expr(&field.expr)
}
walk_list!(visitor, visit_expr, optional_base);
}
ExprTup(ref subexpressions) => {
walk_list!(visitor, visit_expr, subexpressions);
}
ExprCall(ref callee_expression, ref arguments) => {
visitor.visit_expr(callee_expression);
walk_list!(visitor, visit_expr, arguments);
}
ExprMethodCall(ref segment, _, ref arguments) => {
visitor.visit_path_segment(expression.span, segment);
walk_list!(visitor, visit_expr, arguments);
}
ExprBinary(_, ref left_expression, ref right_expression) => {
visitor.visit_expr(left_expression);
visitor.visit_expr(right_expression)
}
ExprAddrOf(_, ref subexpression) | ExprUnary(_, ref subexpression) => {
visitor.visit_expr(subexpression)
}
ExprLit(_) => {}
ExprCast(ref subexpression, ref typ) | ExprType(ref subexpression, ref typ) => {
visitor.visit_expr(subexpression);
visitor.visit_ty(typ)
}
ExprIf(ref head_expression, ref if_block, ref optional_else) => {
visitor.visit_expr(head_expression);
visitor.visit_expr(if_block);
walk_list!(visitor, visit_expr, optional_else);
}
ExprWhile(ref subexpression, ref block, ref opt_label) => {
walk_list!(visitor, visit_label, opt_label);
visitor.visit_expr(subexpression);
visitor.visit_block(block);
}
ExprLoop(ref block, ref opt_label, _) => {
walk_list!(visitor, visit_label, opt_label);
visitor.visit_block(block);
}
ExprMatch(ref subexpression, ref arms, _) => {
visitor.visit_expr(subexpression);
walk_list!(visitor, visit_arm, arms);
}
ExprClosure(_, ref function_declaration, body, _fn_decl_span, _gen) => {
visitor.visit_fn(FnKind::Closure(&expression.attrs),
function_declaration,
body,
expression.span,
expression.id)
}
ExprBlock(ref block, ref opt_label) => {
walk_list!(visitor, visit_label, opt_label);
visitor.visit_block(block);
}
ExprAssign(ref left_hand_expression, ref right_hand_expression) => {
visitor.visit_expr(right_hand_expression);
visitor.visit_expr(left_hand_expression)
}
ExprAssignOp(_, ref left_expression, ref right_expression) => {
visitor.visit_expr(right_expression);
visitor.visit_expr(left_expression)
}
ExprField(ref subexpression, ident) => {
visitor.visit_expr(subexpression);
visitor.visit_ident(ident);
}
ExprIndex(ref main_expression, ref index_expression) => {
visitor.visit_expr(main_expression);
visitor.visit_expr(index_expression)
}
ExprPath(ref qpath) => {
visitor.visit_qpath(qpath, expression.id, expression.span);
}
ExprBreak(ref destination, ref opt_expr) => {
if let Some(ref label) = destination.label {
visitor.visit_label(label);
match destination.target_id {
Ok(node_id) => visitor.visit_def_mention(Def::Label(node_id)),
Err(_) => {},
};
}
walk_list!(visitor, visit_expr, opt_expr);
}
ExprContinue(ref destination) => {
if let Some(ref label) = destination.label {
visitor.visit_label(label);
match destination.target_id {
Ok(node_id) => visitor.visit_def_mention(Def::Label(node_id)),
Err(_) => {},
};
}
}
ExprRet(ref optional_expression) => {
walk_list!(visitor, visit_expr, optional_expression);
}
ExprInlineAsm(_, ref outputs, ref inputs) => {
for output in outputs {
visitor.visit_expr(output)
}
for input in inputs {
visitor.visit_expr(input)
}
}
ExprYield(ref subexpression) => {
visitor.visit_expr(subexpression);
}
}
}
pub fn walk_arm<'v, V: Visitor<'v>>(visitor: &mut V, arm: &'v Arm) {
walk_list!(visitor, visit_pat, &arm.pats);
walk_list!(visitor, visit_expr, &arm.guard);
visitor.visit_expr(&arm.body);
walk_list!(visitor, visit_attribute, &arm.attrs);
}
pub fn walk_vis<'v, V: Visitor<'v>>(visitor: &mut V, vis: &'v Visibility) {
if let Visibility::Restricted { ref path, id } = *vis {
visitor.visit_id(id);
visitor.visit_path(path, id)
}
}
pub fn walk_associated_item_kind<'v, V: Visitor<'v>>(_: &mut V, _: &'v AssociatedItemKind) {
// No visitable content here: this fn exists so you can call it if
// the right thing to do, should content be added in the future,
// would be to walk it.
}
pub fn walk_defaultness<'v, V: Visitor<'v>>(_: &mut V, _: &'v Defaultness) {
// No visitable content here: this fn exists so you can call it if
// the right thing to do, should content be added in the future,
// would be to walk it.
}
#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Debug, PartialEq, Eq)]
pub struct IdRange {
pub min: NodeId,
pub max: NodeId,
}
impl IdRange {
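/// Returns an inverted range (min > max) that is initially empty; `add` widens it to cover ids.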
pub fn max() -> IdRange {
IdRange {
min: NodeId::from_u32(u32::MAX),
max: NodeId::from_u32(u32::MIN),
}
}
pub fn empty(&self) -> bool {
self.min >= self.max
}
pub fn contains(&self, id: NodeId) -> bool {
id >= self.min && id < self.max
}
pub fn add(&mut self, id: NodeId) {
self.min = cmp::min(self.min, id);
self.max = cmp::max(self.max, NodeId::from_u32(id.as_u32() + 1));
}
}
pub struct IdRangeComputingVisitor<'a, 'hir: 'a> {
result: IdRange,
map: &'a map::Map<'hir>,
}
impl<'a, 'hir> IdRangeComputingVisitor<'a, 'hir> {
pub fn new(map: &'a map::Map<'hir>) -> IdRangeComputingVisitor<'a, 'hir> {
IdRangeComputingVisitor { result: IdRange::max(), map: map }
}
pub fn result(&self) -> IdRange {
self.result
}
}
impl<'a, 'hir> Visitor<'hir> for IdRangeComputingVisitor<'a, 'hir> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'hir> {
NestedVisitorMap::OnlyBodies(&self.map)
}
fn visit_id(&mut self, id: NodeId) {
self.result.add(id);
}
}
| {
walk_list!(visitor, visit_generic_arg, &generic_args.args);
walk_list!(visitor, visit_assoc_type_binding, &generic_args.bindings);
} |
DataLoader.js | import * as d3 from 'd3'
const DATA_PATH_BASE_2018 = 'data/2018/'
class | {
static async loadTextAsArray (url) {
const response = await window.fetch(url)
const text = await response.text()
const array = text.split('\n')
return array
}
static async loadCSV (url) {
return d3.csv(url)
}
static async getIndicatorsOfProgressReport () {
const url = `${DATA_PATH_BASE_2018}WP2018_Ind_Progress-2_Sep2018.csv`
return DataLoader.loadCSV(url)
}
static async getComparisonReport () {
const url = `${DATA_PATH_BASE_2018}WP2018_Comparison-2_updJUL20.csv`
return DataLoader.loadCSV(url)
}
static async getLeadershipReport () {
const url = `${DATA_PATH_BASE_2018}WP2018_Leadership.csv`
return DataLoader.loadCSV(url)
}
static async getMinistryReport () {
const url = `${DATA_PATH_BASE_2018}WP2018_Ministries-1.csv`
return DataLoader.loadCSV(url)
}
static async getFlowReport () {
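// No flow report data source is wired up yet, so return an empty dataset for now.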
//const url = ``
//return DataLoader.loadCSV(url)
return [];
}
static async getOccupationRegionReport () {
const url = `${DATA_PATH_BASE_2018}WP2018_Rep_Occ_Rgn_v3_Oct16.csv`
return DataLoader.loadCSV(url)
}
static async getEmployeeCount () {
const url = `${DATA_PATH_BASE_2018}WP2018_EmpCounts.csv`
return DataLoader.loadCSV(url)
}
static async loadAllData () {
const iopReport = await DataLoader.getIndicatorsOfProgressReport()
const comparisonReport = await DataLoader.getComparisonReport()
const leadershipReport = await DataLoader.getLeadershipReport()
const ministryReport = await DataLoader.getMinistryReport()
const orReport = await DataLoader.getOccupationRegionReport()
const flowReport = await DataLoader.getFlowReport()
const employeeCount = await DataLoader.getEmployeeCount()
return {
iopReport,
comparisonReport,
leadershipReport,
ministryReport,
orReport,
flowReport,
employeeCount
}
}
}
export default DataLoader
| DataLoader |
simulator_factory.py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (c) 2014-2018, Lars Asplund [email protected]
"""
Create simulator instances
"""
import os
from vunit.modelsim_interface import ModelSimInterface
from vunit.activehdl_interface import ActiveHDLInterface
from vunit.rivierapro_interface import RivieraProInterface
from vunit.ghdl_interface import GHDLInterface
from vunit.incisive_interface import IncisiveInterface
from vunit.simulator_interface import (BooleanOption,
ListOfStringOption,
VHDLAssertLevelOption)
class SimulatorFactory(object):
"""
Create simulator instances
"""
@staticmethod
def | ():
"""
Return a list of supported simulator classes
"""
return [ModelSimInterface,
RivieraProInterface,
ActiveHDLInterface,
GHDLInterface,
IncisiveInterface]
def _extract_compile_options(self):
"""
Return all supported compile options
"""
result = dict((opt.name, opt) for opt in
[BooleanOption("disable_coverage")])
for sim_class in self.supported_simulators():
for opt in sim_class.compile_options:
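# Simulator-specific options must be prefixed with "<simulator>." and must not collide.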
assert hasattr(opt, "name")
assert hasattr(opt, "validate")
assert opt.name.startswith(sim_class.name + ".")
assert opt.name not in result
result[opt.name] = opt
return result
def _extract_sim_options(self):
"""
Return all supported sim options
"""
result = dict((opt.name, opt) for opt in
[VHDLAssertLevelOption(),
BooleanOption("disable_ieee_warnings"),
ListOfStringOption("pli")])
for sim_class in self.supported_simulators():
for opt in sim_class.sim_options:
assert hasattr(opt, "name")
assert hasattr(opt, "validate")
assert opt.name.startswith(sim_class.name + ".")
assert opt.name not in result
result[opt.name] = opt
return result
def check_sim_option(self, name, value):
"""
Check that sim_option has legal name and value
"""
known_options = sorted(list(self._sim_options.keys()))
if name not in self._sim_options:
raise ValueError("Unknown sim_option %r, expected one of %r" %
(name, known_options))
self._sim_options[name].validate(value)
def check_compile_option_name(self, name):
"""
Check that the compile option is valid
"""
known_options = sorted(list(self._compile_options.keys()))
if name not in known_options:
raise ValueError("Unknown compile_option %r, expected one of %r" %
(name, known_options))
def check_compile_option(self, name, value):
"""
Check that the compile option is valid
"""
self.check_compile_option_name(name)
self._compile_options[name].validate(value)
def select_simulator(self):
"""
Select simulator class, either from VUNIT_SIMULATOR environment variable
or the first available
"""
available_simulators = self._detect_available_simulators()
name_mapping = {simulator_class.name: simulator_class for simulator_class in self.supported_simulators()}
if not available_simulators:
return None
environ_name = "VUNIT_SIMULATOR"
if environ_name in os.environ:
simulator_name = os.environ[environ_name]
if simulator_name not in name_mapping:
raise RuntimeError(
("Simulator from " + environ_name + " environment variable %r is not supported. "
"Supported simulators are %r")
% (simulator_name, name_mapping.keys()))
simulator_class = name_mapping[simulator_name]
else:
simulator_class = available_simulators[0]
return simulator_class
def add_arguments(self, parser):
"""
Add command line arguments to parser
"""
parser.add_argument('-g', '--gui',
action="store_true",
default=False,
help=("Open test case(s) in simulator gui with top level pre loaded"))
for sim in self.supported_simulators():
sim.add_arguments(parser)
def __init__(self):
self._compile_options = self._extract_compile_options()
self._sim_options = self._extract_sim_options()
def _detect_available_simulators(self):
"""
Detect available simulators and return a list
"""
return [simulator_class
for simulator_class in self.supported_simulators()
if simulator_class.is_available()]
@property
def has_simulator(self):
return bool(self._detect_available_simulators())
SIMULATOR_FACTORY = SimulatorFactory()
| supported_simulators |
wsgi.py | """
WSGI config for djgumroad project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import sys
from pathlib import Path
from django.core.wsgi import get_wsgi_application
# This allows easy placement of apps within the interior
# djgumroad directory.
ROOT_DIR = Path(__file__).resolve(strict=True).parent.parent
sys.path.append(str(ROOT_DIR / "djgumroad"))
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production") | # setting points here.
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application) |
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION |
test_motion_planning.py | import igibson
from igibson.envs.igibson_env import iGibsonEnv
from time import time
import os
from igibson.utils.assets_utils import download_assets, download_demo_data
from igibson.utils.motion_planning_wrapper import MotionPlanningWrapper
import numpy as np
import matplotlib.pyplot as plt
def test_occupancy_grid():
print("Test env")
download_assets()
download_demo_data()
config_filename = os.path.join(igibson.root_path, 'test', 'test_house_occupancy_grid.yaml')
nav_env = iGibsonEnv(config_file=config_filename, mode='headless')
nav_env.reset()
nav_env.robots[0].set_position_orientation([0,0,0],[0,0,0,1])
nav_env.simulator.step()
action = nav_env.action_space.sample()
ts = nav_env.step(action)
assert np.sum(ts[0]['occupancy_grid'] == 0) > 0
assert np.sum(ts[0]['occupancy_grid'] == 1) > 0
plt.imshow(ts[0]['occupancy_grid'][:,:,0])
plt.colorbar()
plt.savefig('occupancy_grid.png')
nav_env.clean()
def test_base_planning():
print("Test env")
download_assets()
download_demo_data()
config_filename = os.path.join(igibson.root_path, 'test', 'test_house_occupancy_grid.yaml') |
nav_env = iGibsonEnv(config_file=config_filename, mode='headless')
motion_planner = MotionPlanningWrapper(nav_env)
state = nav_env.reset()
nav_env.robots[0].set_position_orientation([0,0,0],[0,0,0,1])
nav_env.simulator.step()
plan = None
itr = 0
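# The planner may fail to find a path on a given attempt, so retry up to 10 times.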
while plan is None and itr < 10:
plan = motion_planner.plan_base_motion([0.5,0,0])
print(plan)
itr += 1
motion_planner.dry_run_base_plan(plan)
assert len(plan) > 0
nav_env.clean() | |
secondary_indexes_test.py | import random, re, time, uuid
from dtest import Tester, debug
from pytools import since
from pyassertions import assert_invalid
from cassandra import InvalidRequest
from cassandra.query import BatchStatement, SimpleStatement
from cassandra.protocol import ConfigurationException
class TestSecondaryIndexes(Tester):
def bug3367_test(self):
cluster = self.cluster
cluster.populate(1).start()
[node1] = cluster.nodelist()
cursor = self.patient_cql_connection(node1)
self.create_ks(cursor, 'ks', 1)
columns = {"password": "varchar", "gender": "varchar", "session_token": "varchar", "state": "varchar", "birth_year": "bigint"}
self.create_cf(cursor, 'users', columns=columns)
# insert data
cursor.execute("INSERT INTO users (KEY, password, gender, state, birth_year) VALUES ('user1', 'ch@ngem3a', 'f', 'TX', 1968);")
cursor.execute("INSERT INTO users (KEY, password, gender, state, birth_year) VALUES ('user2', 'ch@ngem3b', 'm', 'CA', 1971);")
# create index
cursor.execute("CREATE INDEX gender_key ON users (gender);")
cursor.execute("CREATE INDEX state_key ON users (state);")
cursor.execute("CREATE INDEX birth_year_key ON users (birth_year);")
# insert data
cursor.execute("INSERT INTO users (KEY, password, gender, state, birth_year) VALUES ('user3', 'ch@ngem3c', 'f', 'FL', 1978);")
cursor.execute("INSERT INTO users (KEY, password, gender, state, birth_year) VALUES ('user4', 'ch@ngem3d', 'm', 'TX', 1974);")
result = cursor.execute("SELECT * FROM users;")
assert len(result) == 4, "Expecting 4 users, got " + str(result)
result = cursor.execute("SELECT * FROM users WHERE state='TX';")
assert len(result) == 2, "Expecting 2 users, got " + str(result)
result = cursor.execute("SELECT * FROM users WHERE state='CA';")
assert len(result) == 1, "Expecting 1 user, got " + str(result)
@since('2.1')
def test_low_cardinality_indexes(self):
"""
Checks that low-cardinality secondary index subqueries are executed
concurrently
"""
cluster = self.cluster
cluster.populate(3).start()
node1, node2, node3 = cluster.nodelist()
conn = self.patient_cql_connection(node1, version='3.0.0')
cursor = conn
cursor.max_trace_wait = 120
cursor.execute("CREATE KEYSPACE ks WITH REPLICATION = {'class': 'SimpleStrategy', 'replication_factor': '1'};")
cursor.execute("CREATE TABLE ks.cf (a text PRIMARY KEY, b text);")
cursor.execute("CREATE INDEX b_index ON ks.cf (b);")
num_rows = 100
for i in range(num_rows):
indexed_value = i % (num_rows / 3)
# use the same indexed value three times
cursor.execute("INSERT INTO ks.cf (a, b) VALUES ('%d', '%d');" % (i, indexed_value))
cluster.flush()
def check_trace_events(trace):
# we should see multiple requests get enqueued prior to index scan
# execution happening
# Look for messages like:
# Submitting range requests on 769 ranges with a concurrency of 769 (0.0070312 rows per range expected)
regex = r"Submitting range requests on [0-9]+ ranges with a concurrency of (\d+) \(([0-9.]+) rows per range expected\)"
for event in trace.events:
desc = event.description
match = re.match(regex, desc)
if match:
concurrency = int(match.group(1))
expected_per_range = float(match.group(2))
self.assertTrue(concurrency > 1, "Expected more than 1 concurrent range request, got %d" % concurrency)
self.assertTrue(expected_per_range > 0)
break
else:
self.fail("Didn't find matching trace event")
query = SimpleStatement("SELECT * FROM ks.cf WHERE b='1';")
result = cursor.execute(query, trace=True)
self.assertEqual(3, len(result))
check_trace_events(query.trace)
query = SimpleStatement("SELECT * FROM ks.cf WHERE b='1' LIMIT 100;")
result = cursor.execute(query, trace=True)
self.assertEqual(3, len(result))
check_trace_events(query.trace)
query = SimpleStatement("SELECT * FROM ks.cf WHERE b='1' LIMIT 3;")
result = cursor.execute(query, trace=True)
self.assertEqual(3, len(result))
check_trace_events(query.trace)
for limit in (1, 2):
result = cursor.execute("SELECT * FROM ks.cf WHERE b='1' LIMIT %d;" % (limit,))
self.assertEqual(limit, len(result))
@since('2.1')
def test_6924_dropping_ks(self):
|
@since('2.1')
def test_6924_dropping_cf(self):
"""Tests CASSANDRA-6924
Data inserted immediately after dropping and recreating an
indexed column family is not included in the index.
"""
# Reproducing requires at least 3 nodes:
cluster = self.cluster
cluster.populate(3).start()
node1, node2, node3 = cluster.nodelist()
conn = self.patient_cql_connection(node1)
cursor = conn
self.create_ks(cursor, 'ks', 1)
#This only occurs when dropping and recreating with
#the same name, so loop through this test a few times:
for i in range(10):
debug("round %s" % i)
try:
cursor.execute("DROP COLUMNFAMILY ks.cf")
except InvalidRequest:
pass
cursor.execute("CREATE TABLE ks.cf (key text PRIMARY KEY, col1 text);")
cursor.execute("CREATE INDEX on ks.cf (col1);")
for r in range(10):
stmt = "INSERT INTO ks.cf (key, col1) VALUES ('%s','asdf');" % r
cursor.execute(stmt)
self.wait_for_schema_agreement(cursor)
rows = cursor.execute("select count(*) from ks.cf WHERE col1='asdf'")
count = rows[0][0]
self.assertEqual(count, 10)
@since('2.0')
def test_8280_validate_indexed_values(self):
"""Tests CASSANDRA-8280
Reject inserts & updates where values of any indexed
column is > 64k
"""
cluster = self.cluster
cluster.populate(1).start()
node1 = cluster.nodelist()[0]
conn = self.patient_cql_connection(node1)
cursor = conn
self.create_ks(cursor, 'ks', 1)
self.insert_row_with_oversize_value("CREATE TABLE %s(a int, b int, c text, PRIMARY KEY (a))",
"CREATE INDEX ON %s(c)",
"INSERT INTO %s (a, b, c) VALUES (0, 0, ?)",
cursor)
self.insert_row_with_oversize_value("CREATE TABLE %s(a int, b text, c int, PRIMARY KEY (a, b))",
"CREATE INDEX ON %s(b)",
"INSERT INTO %s (a, b, c) VALUES (0, ?, 0)",
cursor)
self.insert_row_with_oversize_value("CREATE TABLE %s(a text, b int, c int, PRIMARY KEY ((a, b)))",
"CREATE INDEX ON %s(a)",
"INSERT INTO %s (a, b, c) VALUES (?, 0, 0)",
cursor)
self.insert_row_with_oversize_value("CREATE TABLE %s(a int, b text, PRIMARY KEY (a)) WITH COMPACT STORAGE",
"CREATE INDEX ON %s(b)",
"INSERT INTO %s (a, b) VALUES (0, ?)",
cursor)
def insert_row_with_oversize_value(self, create_table_cql, create_index_cql, insert_cql, cursor):
""" Validate two variations of the supplied insert statement, first
as it is and then again transformed into a conditional statement
"""
table_name = "table_" + str(int(round(time.time() * 1000)))
cursor.execute(create_table_cql % table_name)
cursor.execute(create_index_cql % table_name)
value = "X" * 65536
self._assert_invalid_request(cursor, insert_cql % table_name, value)
self._assert_invalid_request(cursor, (insert_cql % table_name) + ' IF NOT EXISTS', value)
def _assert_invalid_request(self, cursor, insert_cql, value):
""" Perform two executions of the supplied statement, as a
single statement and again as part of a batch
"""
prepared = cursor.prepare(insert_cql)
self._execute_and_fail(lambda: cursor.execute(prepared, [value]), insert_cql)
batch = BatchStatement()
batch.add(prepared, [value])
self._execute_and_fail(lambda: cursor.execute(batch), insert_cql)
def _execute_and_fail(self, operation, cql_string):
try:
operation()
assert False, "Expecting query %s to be invalid" % cql_string
except AssertionError as e:
raise e
except InvalidRequest:
pass
def wait_for_schema_agreement(self, cursor):
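# Poll until every peer reports the same schema_version as the local node,
# retrying every 0.1s via recursion.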
rows = cursor.execute("SELECT schema_version FROM system.local")
local_version = rows[0]
all_match = True
rows = cursor.execute("SELECT schema_version FROM system.peers")
for peer_version in rows:
if peer_version != local_version:
all_match = False
break
if all_match:
return
else:
time.sleep(0.10)
self.wait_for_schema_agreement(cursor)
class TestSecondaryIndexesOnCollections(Tester):
def __init__(self, *args, **kwargs):
Tester.__init__(self, *args, **kwargs)
@since('2.1')
def test_list_indexes(self):
"""
Checks that secondary indexes on lists work for querying.
"""
cluster = self.cluster
cluster.populate(1).start()
[node1] = cluster.nodelist()
cursor = self.patient_cql_connection(node1)
self.create_ks(cursor, 'list_index_search', 1)
stmt = ("CREATE TABLE list_index_search.users ("
"user_id uuid PRIMARY KEY,"
"email text,"
"uuids list<uuid>"
");")
cursor.execute(stmt)
# no index present yet, make sure there's an error trying to query column
stmt = ("SELECT * from list_index_search.users where uuids contains {some_uuid}"
).format(some_uuid=uuid.uuid4())
assert_invalid(cursor, stmt, 'No secondary indexes on the restricted columns support the provided operators')
# add index and query again (even though there are no rows in the table yet)
stmt = "CREATE INDEX user_uuids on list_index_search.users (uuids);"
cursor.execute(stmt)
stmt = ("SELECT * from list_index_search.users where uuids contains {some_uuid}").format(some_uuid=uuid.uuid4())
row = cursor.execute(stmt)
self.assertEqual(0, len(row))
# add a row which doesn't specify data for the indexed column, and query again
user1_uuid = uuid.uuid4()
stmt = ("INSERT INTO list_index_search.users (user_id, email)"
"values ({user_id}, '[email protected]')"
).format(user_id=user1_uuid)
cursor.execute(stmt)
stmt = ("SELECT * from list_index_search.users where uuids contains {some_uuid}").format(some_uuid=uuid.uuid4())
row = cursor.execute(stmt)
self.assertEqual(0, len(row))
_id = uuid.uuid4()
# alter the row to add a single item to the indexed list
stmt = ("UPDATE list_index_search.users set uuids = [{id}] where user_id = {user_id}"
).format(id=_id, user_id=user1_uuid)
cursor.execute(stmt)
stmt = ("SELECT * from list_index_search.users where uuids contains {some_uuid}").format(some_uuid=_id)
row = cursor.execute(stmt)
self.assertEqual(1, len(row))
# add a bunch of user records and query them back
shared_uuid = uuid.uuid4() # this uuid will be on all records
log = []
for i in range(50000):
user_uuid = uuid.uuid4()
unshared_uuid = uuid.uuid4()
# give each record a unique email address using the int index
stmt = ("INSERT INTO list_index_search.users (user_id, email, uuids)"
"values ({user_uuid}, '{prefix}@example.com', [{s_uuid}, {u_uuid}])"
).format(user_uuid=user_uuid, prefix=i, s_uuid=shared_uuid, u_uuid=unshared_uuid)
cursor.execute(stmt)
log.append(
{'user_id': user_uuid,
'email':str(i)+'@example.com',
'unshared_uuid':unshared_uuid}
)
# confirm there is now 50k rows with the 'shared' uuid above in the secondary index
stmt = ("SELECT * from list_index_search.users where uuids contains {shared_uuid}").format(shared_uuid=shared_uuid)
rows = cursor.execute(stmt)
result = [row for row in rows]
self.assertEqual(50000, len(result))
# shuffle the log in-place, and double-check a slice of records by querying the secondary index
random.shuffle(log)
for log_entry in log[:1000]:
stmt = ("SELECT user_id, email, uuids FROM list_index_search.users where uuids contains {unshared_uuid}"
).format(unshared_uuid=log_entry['unshared_uuid'])
rows = cursor.execute(stmt)
self.assertEqual(1, len(rows))
db_user_id, db_email, db_uuids = rows[0]
self.assertEqual(db_user_id, log_entry['user_id'])
self.assertEqual(db_email, log_entry['email'])
self.assertEqual(str(db_uuids[0]), str(shared_uuid))
self.assertEqual(str(db_uuids[1]), str(log_entry['unshared_uuid']))
@since('2.1')
def test_set_indexes(self):
"""
Checks that secondary indexes on sets work for querying.
"""
cluster = self.cluster
cluster.populate(1).start()
[node1] = cluster.nodelist()
cursor = self.patient_cql_connection(node1)
self.create_ks(cursor, 'set_index_search', 1)
stmt = ("CREATE TABLE set_index_search.users ("
"user_id uuid PRIMARY KEY,"
"email text,"
"uuids set<uuid>);")
cursor.execute(stmt)
# no index present yet, make sure there's an error trying to query column
stmt = ("SELECT * from set_index_search.users where uuids contains {some_uuid}").format(some_uuid=uuid.uuid4())
assert_invalid(cursor, stmt, 'No secondary indexes on the restricted columns support the provided operators')
# add index and query again (even though there are no rows in the table yet)
stmt = "CREATE INDEX user_uuids on set_index_search.users (uuids);"
cursor.execute(stmt)
stmt = ("SELECT * from set_index_search.users where uuids contains {some_uuid}").format(some_uuid=uuid.uuid4())
row = cursor.execute(stmt)
self.assertEqual(0, len(row))
# add a row which doesn't specify data for the indexed column, and query again
user1_uuid = uuid.uuid4()
stmt = ("INSERT INTO set_index_search.users (user_id, email) values ({user_id}, '[email protected]')"
).format(user_id=user1_uuid)
cursor.execute(stmt)
stmt = ("SELECT * from set_index_search.users where uuids contains {some_uuid}").format(some_uuid=uuid.uuid4())
row = cursor.execute(stmt)
self.assertEqual(0, len(row))
_id = uuid.uuid4()
# alter the row to add a single item to the indexed set
stmt = ("UPDATE set_index_search.users set uuids = {{{id}}} where user_id = {user_id}").format(id=_id, user_id=user1_uuid)
cursor.execute(stmt)
stmt = ("SELECT * from set_index_search.users where uuids contains {some_uuid}").format(some_uuid=_id)
row = cursor.execute(stmt)
self.assertEqual(1, len(row))
# add a bunch of user records and query them back
shared_uuid = uuid.uuid4() # this uuid will be on all records
log = []
for i in range(50000):
user_uuid = uuid.uuid4()
unshared_uuid = uuid.uuid4()
# give each record a unique email address using the int index
stmt = ("INSERT INTO set_index_search.users (user_id, email, uuids)"
"values ({user_uuid}, '{prefix}@example.com', {{{s_uuid}, {u_uuid}}})"
).format(user_uuid=user_uuid, prefix=i, s_uuid=shared_uuid, u_uuid=unshared_uuid)
cursor.execute(stmt)
log.append(
{'user_id': user_uuid,
'email':str(i)+'@example.com',
'unshared_uuid':unshared_uuid}
)
# confirm there is now 50k rows with the 'shared' uuid above in the secondary index
stmt = ("SELECT * from set_index_search.users where uuids contains {shared_uuid}").format(shared_uuid=shared_uuid)
rows = cursor.execute(stmt)
result = [row for row in rows]
self.assertEqual(50000, len(result))
# shuffle the log in-place, and double-check a slice of records by querying the secondary index
random.shuffle(log)
for log_entry in log[:1000]:
stmt = ("SELECT user_id, email, uuids FROM set_index_search.users where uuids contains {unshared_uuid}"
).format(unshared_uuid=log_entry['unshared_uuid'])
rows = cursor.execute(stmt)
self.assertEqual(1, len(rows))
db_user_id, db_email, db_uuids = rows[0]
self.assertEqual(db_user_id, log_entry['user_id'])
self.assertEqual(db_email, log_entry['email'])
self.assertTrue(shared_uuid in db_uuids)
self.assertTrue(log_entry['unshared_uuid'] in db_uuids)
@since('2.1')
def test_map_indexes(self):
"""
Checks that secondary indexes on maps work for querying on both keys and values
"""
cluster = self.cluster
cluster.populate(1).start()
[node1] = cluster.nodelist()
cursor = self.patient_cql_connection(node1)
self.create_ks(cursor, 'map_index_search', 1)
stmt = ("CREATE TABLE map_index_search.users ("
"user_id uuid PRIMARY KEY,"
"email text,"
"uuids map<uuid, uuid>);")
cursor.execute(stmt)
# no index present yet, make sure there's an error trying to query column
stmt = ("SELECT * from map_index_search.users where uuids contains {some_uuid}").format(some_uuid=uuid.uuid4())
assert_invalid(cursor, stmt, 'No secondary indexes on the restricted columns support the provided operators')
stmt = ("SELECT * from map_index_search.users where uuids contains key {some_uuid}"
).format(some_uuid=uuid.uuid4())
assert_invalid(cursor, stmt, 'No secondary indexes on the restricted columns support the provided operators')
# add index on keys and query again (even though there are no rows in the table yet)
stmt = "CREATE INDEX user_uuids on map_index_search.users (KEYS(uuids));"
cursor.execute(stmt)
stmt = "SELECT * from map_index_search.users where uuids contains key {some_uuid}".format(some_uuid=uuid.uuid4())
rows = cursor.execute(stmt)
self.assertEqual(0, len(rows))
# add a row which doesn't specify data for the indexed column, and query again
user1_uuid = uuid.uuid4()
stmt = ("INSERT INTO map_index_search.users (user_id, email)"
"values ({user_id}, '[email protected]')"
).format(user_id=user1_uuid)
cursor.execute(stmt)
stmt = ("SELECT * from map_index_search.users where uuids contains key {some_uuid}").format(some_uuid=uuid.uuid4())
rows = cursor.execute(stmt)
self.assertEqual(0, len(rows))
_id = uuid.uuid4()
# alter the row to add a single item to the indexed map
stmt = ("UPDATE map_index_search.users set uuids = {{{id}:{user_id}}} where user_id = {user_id}"
).format(id=_id, user_id=user1_uuid)
cursor.execute(stmt)
stmt = ("SELECT * from map_index_search.users where uuids contains key {some_uuid}").format(some_uuid=_id)
rows = cursor.execute(stmt)
self.assertEqual(1, len(rows))
# add a bunch of user records and query them back
shared_uuid = uuid.uuid4() # this uuid will be on all records
log = []
for i in range(50000):
user_uuid = uuid.uuid4()
unshared_uuid1 = uuid.uuid4()
unshared_uuid2 = uuid.uuid4()
# give each record a unique email address using the int index, add unique ids for keys and values
stmt = ("INSERT INTO map_index_search.users (user_id, email, uuids)"
"values ({user_uuid}, '{prefix}@example.com', {{{u_uuid1}:{u_uuid2}, {s_uuid}:{s_uuid}}})"
).format(user_uuid=user_uuid, prefix=i, s_uuid=shared_uuid, u_uuid1=unshared_uuid1, u_uuid2=unshared_uuid2)
cursor.execute(stmt)
log.append(
{'user_id': user_uuid,
'email':str(i)+'@example.com',
'unshared_uuid1':unshared_uuid1,
'unshared_uuid2':unshared_uuid2}
)
# confirm there is now 50k rows with the 'shared' uuid above in the secondary index
stmt = ("SELECT * from map_index_search.users where uuids contains key {shared_uuid}"
).format(shared_uuid=shared_uuid)
rows = cursor.execute(stmt)
result = [row for row in rows]
self.assertEqual(50000, len(result))
# shuffle the log in-place, and double-check a slice of records by querying the secondary index on keys
random.shuffle(log)
for log_entry in log[:1000]:
stmt = ("SELECT user_id, email, uuids FROM map_index_search.users where uuids contains key {unshared_uuid1}"
).format(unshared_uuid1=log_entry['unshared_uuid1'])
row = cursor.execute(stmt)
self.assertEqual(1, len(row))
db_user_id, db_email, db_uuids = row[0]
self.assertEqual(db_user_id, log_entry['user_id'])
self.assertEqual(db_email, log_entry['email'])
self.assertTrue(shared_uuid in db_uuids)
self.assertTrue(log_entry['unshared_uuid1'] in db_uuids)
# attempt to add an index on map values as well (should fail)
stmt = "CREATE INDEX user_uuids on map_index_search.users (uuids);"
matching = "Cannot create index on uuids values, an index on uuids keys already exists and indexing a map on both keys and values at the same time is not currently supported"
assert_invalid(cursor, stmt, matching)
# since cannot have index on map keys and values remove current index on keys
stmt = "DROP INDEX user_uuids;"
cursor.execute(stmt)
# add index on values (will index rows added prior)
stmt = "CREATE INDEX user_uids on map_index_search.users (uuids);"
cursor.execute(stmt)
# shuffle the log in-place, and double-check a slice of records by querying the secondary index
random.shuffle(log)
time.sleep(10)
# since we already inserted unique ids for values as well, check that appropriate records are found
for log_entry in log[:1000]:
stmt = ("SELECT user_id, email, uuids FROM map_index_search.users where uuids contains {unshared_uuid2}"
).format(unshared_uuid2=log_entry['unshared_uuid2'])
rows = cursor.execute(stmt)
self.assertEqual(1, len(rows))
db_user_id, db_email, db_uuids = rows[0]
self.assertEqual(db_user_id, log_entry['user_id'])
self.assertEqual(db_email, log_entry['email'])
self.assertTrue(shared_uuid in db_uuids)
self.assertTrue(log_entry['unshared_uuid2'] in db_uuids.values())
| """Tests CASSANDRA-6924
Data inserted immediately after dropping and recreating a
keyspace with an indexed column family is not included
in the index.
"""
# Reproducing requires at least 3 nodes:
cluster = self.cluster
cluster.populate(3).start()
node1, node2, node3 = cluster.nodelist()
conn = self.patient_cql_connection(node1)
cursor = conn
#This only occurs when dropping and recreating with
#the same name, so loop through this test a few times:
for i in range(10):
debug("round %s" % i)
try:
cursor.execute("DROP KEYSPACE ks")
except ConfigurationException:
pass
self.create_ks(cursor, 'ks', 1)
cursor.execute("CREATE TABLE ks.cf (key text PRIMARY KEY, col1 text);")
cursor.execute("CREATE INDEX on ks.cf (col1);")
for r in range(10):
stmt = "INSERT INTO ks.cf (key, col1) VALUES ('%s','asdf');" % r
cursor.execute(stmt)
self.wait_for_schema_agreement(cursor)
rows = cursor.execute("select count(*) from ks.cf WHERE col1='asdf'")
count = rows[0][0]
self.assertEqual(count, 10) |
network.go | // Copyright 2019 The Prism Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"fmt"
"math"
"math/rand"
"gonum.org/v1/plot/plotter"
"github.com/pointlander/datum/iris"
"github.com/pointlander/gradient/tf32"
)
// Network is a neural network
type Network struct {
*rand.Rand
BatchSize int
Input, Output tf32.V
W, B [4]tf32.V
Parameters []*tf32.V
Ones tf32.V
L [4]tf32.Meta
Cost tf32.Meta
}
// NewNetwork creates a new neural network
func NewNetwork(seed int64, batchSize int) *Network {
n := Network{
Rand: rand.New(rand.NewSource(seed)),
BatchSize: batchSize,
Parameters: make([]*tf32.V, 0, 8),
}
n.Input, n.Output = tf32.NewV(4, batchSize), tf32.NewV(4, batchSize)
n.W[0], n.B[0] = tf32.NewV(4, Width), tf32.NewV(Width)
n.W[1], n.B[1] = tf32.NewV(Width, Width2), tf32.NewV(Width2)
n.W[2], n.B[2] = tf32.NewV(Width2, Width3), tf32.NewV(Width3)
n.W[3], n.B[3] = tf32.NewV(Width3, 4), tf32.NewV(4)
for i := range n.W {
n.Parameters = append(n.Parameters, &n.W[i], &n.B[i])
}
w := float32(math.Sqrt(1 / 4.0))
for j, p := range n.Parameters {
if j%2 == 0 {
for i := 0; i < cap(p.X); i++ {
p.X = append(p.X, n.Random32(w))
}
} else {
for i := 0; i < cap(p.X); i++ {
p.X = append(p.X, 0)
}
}
}
n.Ones = tf32.NewV(batchSize)
for i := 0; i < cap(n.Ones.X); i++ {
n.Ones.X = append(n.Ones.X, 1)
}
last := n.Input.Meta()
for i := range n.L {
n.L[i] = tf32.Sigmoid(tf32.Add(tf32.Mul(n.W[i].Meta(), last), n.B[i].Meta()))
last = n.L[i]
}
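// Cost is the average of (1 - similarity) between the final layer and the
// target; Train feeds the same values to Input and Output, so the network
// is trained as an autoencoder.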
n.Cost = tf32.Avg(tf32.Sub(n.Ones.Meta(), tf32.Similarity(last, n.Output.Meta())))
//n.Cost = tf32.Avg(tf32.Quadratic(last, n.Output.Meta()))
return &n
}
// Random32 generates a bimodal random number
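// The two modes are centered at -2w and +2w, each with standard deviation w.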
func (n *Network) Random32(w float32) float32 {
if n.Intn(2) == 0 {
return -2*w + float32(n.NormFloat64())*w
}
return 2*w + float32(n.NormFloat64())*w
}
// Train trains the neural network on training data for iterations
func (n *Network) Train(training []iris.Iris, iterations int) (cost, variance plotter.XYs) { | for i := range training {
data = append(data, &training[i])
}
cost, variance = make(plotter.XYs, 0, iterations), make(plotter.XYs, 0, iterations)
for i := 0; i < iterations; i++ {
for i := range data {
j := i + n.Intn(length-i)
data[i], data[j] = data[j], data[i]
}
total := float32(0.0)
for j := 0; j < length; j += n.BatchSize {
for _, p := range n.Parameters {
p.Zero()
}
n.Input.Zero()
n.Output.Zero()
n.Ones.Zero()
values := make([]float32, 0, 4*n.BatchSize)
for k := 0; k < n.BatchSize; k++ {
index := (j + k) % length
for _, measure := range data[index].Measures {
values = append(values, float32(measure))
}
}
n.Input.Set(values)
n.Output.Set(values)
total += tf32.Gradient(n.Cost).X[0]
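// Abort training if any gradient is NaN or Inf; otherwise clip the global
// gradient norm to 1 before applying the plain SGD update with step size Eta.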
norm := float32(0)
for k, p := range n.Parameters {
for l, d := range p.D {
if math.IsNaN(float64(d)) {
fmt.Println(d, k, l)
return cost, variance
} else if math.IsInf(float64(d), 0) {
fmt.Println(d, k, l)
return cost, variance
}
norm += d * d
}
}
norm = float32(math.Sqrt(float64(norm)))
if norm > 1 {
scaling := 1 / norm
for _, p := range n.Parameters {
for l, d := range p.D {
p.X[l] -= Eta * d * scaling
}
}
} else {
for _, p := range n.Parameters {
for l, d := range p.D {
p.X[l] -= Eta * d
}
}
}
}
cost = append(cost, plotter.XY{X: float64(i), Y: float64(total)})
sum, sumSquared, count := float32(0), float32(0), 0
for _, p := range n.Parameters {
for _, x := range p.X {
sum += x
sumSquared += x * x
count++
}
}
sumSquared /= float32(count)
sum /= float32(count)
variance = append(variance, plotter.XY{X: float64(i), Y: float64(sumSquared - sum*sum)})
}
return cost, variance
}
// Embeddings generates the embeddings
func (n *Network) Embeddings(training []iris.Iris) Embeddings {
input := tf32.NewV(4)
l1 := tf32.Sigmoid(tf32.Add(tf32.Mul(n.W[0].Meta(), input.Meta()), n.B[0].Meta()))
l2 := tf32.Sigmoid(tf32.Add(tf32.Mul(n.W[1].Meta(), l1), n.B[1].Meta()))
embeddings := Embeddings{
Columns: Width2,
Network: n,
Embeddings: make([]Embedding, 0, len(training)),
}
for i := range training {
values := make([]float32, 0, 4)
for _, measure := range training[i].Measures {
values = append(values, float32(measure))
}
input.Set(values)
embedding := Embedding{
Iris: training[i],
Source: i,
Features: make([]float64, 0, Width2),
}
l2(func(a *tf32.V) bool {
for _, value := range a.X {
embedding.Features = append(embedding.Features, float64(value))
}
return true
})
embeddings.Embeddings = append(embeddings.Embeddings, embedding)
}
return embeddings
} | length := len(training)
data := make([]*iris.Iris, 0, length) |
__main__.py | # Author: Hansheng Zhao <[email protected]> (https://www.zhs.me)
from configparser import RawConfigParser
from argparse import ArgumentParser
def create_config(file_path):
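# Build a RawConfigParser pre-populated with every section and default
# option Utilize expects, then write it to file_path.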
# create config parser instance
|
# create argument parser instance
argparse = ArgumentParser(
prog='python[2|3] -m utilize', description='''
Initialize default config file for Utilize class.
''', epilog='Happy coding :-)'
)
# add positional argument
argparse.add_argument(
'file_path', metavar='file_path',
type=str, nargs='?', default='config.ini',
help="The config file path, default 'config.ini'"
)
# add program execute option
argparse.add_argument(
'-i', '--init',
dest='create_config', action='store_const',
const=create_config, required = True,
default=lambda _: argparse.print_help(),
help='Create default config file at [file_path].'
)
# parse argv vector into args
args = argparse.parse_args()
# execute depend on the args
args.create_config(args.file_path)
| configparser = RawConfigParser(allow_no_value = True)
# create required sections
configparser.add_section('global')
configparser.add_section('basecon')
configparser.add_section('seco')
configparser.add_section('redis')
configparser.add_section('memcached')
configparser.add_section('kvs')
configparser.add_section('kvs:init')
configparser.add_section('decibel')
configparser.add_section('sqlite:init')
configparser.add_section('sqlite:stmt')
configparser.add_section('mysql')
configparser.add_section('mysql:init')
configparser.add_section('mysql:stmt')
# add comments to sections
configparser.set('global', '; global settings')
configparser.set('basecon', '; choose base from (2 <= base <= 65)')
configparser.set('seco', "; choose `serialize` in ('json', 'msgpack', 'pickle')")
configparser.set('seco', "; chose `compress` in ('zlib', 'bz2')")
configparser.set('kvs', "; `engine` in (':memory:', 'redis', 'memcached', 'dbm')")
configparser.set('kvs', "; `path` is only used when `engine` is set to 'dbm'")
configparser.set('kvs:init', '; `key = value` format, value can be valid JSON string')
configparser.set('decibel', "; choose decibel `engine` in ('sqlite', 'mysql')")
configparser.set('decibel', "; `path` is only used when `engine` is set to 'sqlite'")
configparser.set('sqlite:init', '; `stmt_id = stmt` format, initialize statements')
configparser.set('sqlite:stmt', '; `stmt_id = stmt` format, regular statements')
configparser.set('mysql:init', '; `stmt_id = stmt` format, initialize statements')
configparser.set('mysql:stmt', '; `stmt_id = stmt` format, regular statements')
# read in default configs
configparser.read_dict({
'global': {},
'basecon': {'base': 62},
'seco': {
'serialize': 'msgpack', 'compress': 'zlib'
},
'redis': {
';unix_socket_path': '',
'host': 'localhost',
'port': '6379',
'password': '',
'db': '0',
},
'memcached': {
'host': 'localhost', 'port': '11211'
},
'kvs': {
'initialize': 'false',
'engine': ':memory:',
'path': './database.kvs'
},
'kvs:init': {},
'decibel': {
'initialize': 'false',
'engine': 'sqlite',
'path': './database.sqlite'
},
'sqlite:init': {},
'sqlite:stmt': {},
'mysql': {
'host': 'localhost', 'port': '3306',
'user': '', 'password': '', 'database': ''
},
'mysql:init': {},
'mysql:stmt': {}
})
# write default configs to file
with open(file_path, 'w', encoding = 'UTF8') as fp:
configparser.write(fp) |
Ex 85.py | valor = 0
num = [[],[]]
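# num[0] collects the even values, num[1] the odd ones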
for c in range (0,7):
valor = int(input(f'Enter value #{c+1}:'))
if valor % 2 == 0:
n | else:
num[1].append(valor)
print('-='*30)
print(f'The even numbers were {num[0]}\nThe odd numbers were {num[1]}.')
| um[0].append(valor)
|
__init__.py | try:
from .engine import create_supervised_trainer
except ImportError: # no ignite
def | (*args, **kwargs):
raise ImportError('Ignite not installed')
from .modules import (Lambda,
Flatten,
View,
Concat,
Split,
SkipConnection,
NoInputSpec,
CannedNet,
CannedResNet)
from .recurrent import (Window,
Recur)
from .tracing import (convert_to_tensor,
Integer)
from .utils import (batch_fn,
outer_product,
flatten,
batch_flatten,
cat,
stack)
| create_supervised_trainer |
common.py | from __future__ import unicode_literals | from django.core.urlresolvers import reverse
Q = models.Q
class Game(models.Model):
name = models.CharField(max_length=128, db_index=True)
product_key = models.CharField(max_length=32, unique=True)
class Meta:
app_label = 'gge_proxy_manager'
def __unicode__(self):
return self.name
class Kingdom(models.Model):
name = models.CharField(max_length=128, null=True, default=None, blank=True)
kid = models.SmallIntegerField(db_index=True)
visual_key = models.CharField(max_length=8, default="-")
game = models.ForeignKey(Game, db_index=True, null=True, default=None, blank=True)
class Meta:
app_label = 'gge_proxy_manager'
def __unicode__(self):
if not self.name:
return "(unknown)"
return self.name
class Confederation(models.Model):
name = models.CharField(max_length=128)
owner = models.ForeignKey(settings.AUTH_USER_MODEL)
slug = models.SlugField(max_length=128)
logo = models.FileField(null=True, blank=True, default=None, upload_to='confederation_logos/')
description = models.TextField(null=True, blank=True, default=None)
class Meta:
app_label = 'gge_proxy_manager'
def __unicode__(self):
return self.name
def get_absolute_url(self):
return reverse("intern:confederation_dashboard", kwargs={"slug": self.slug})
def get_members(self):
return Player.objects.filter(alliance__confederation=self).order_by('alliance_rank', '-level') |
from django.db import models
from django.conf import settings
from .player import Player |
1110.py | n = int(input())
count = 1
_n = int(str(n % 10) + str((n // 10 + n % 10) % 10))
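# next number = last digit of n followed by (n // 10 + n % 10) mod 10;
# count the steps until the sequence returns to the original n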
| print(count) | while _n != n:
count += 1
_n = int(str(_n % 10) + str((_n // 10 + _n % 10) % 10))
|
Scene.ts | interface ReplicatedScene {
tick: number;
boardId: string;
gameId: string;
playerId: string;
clientId: string;
boards: { [key: string]: ReplicatedBoard };
marbles: { [key: string]: ReplicatedMarble };
avatars: { [key: string]: ReplicatedAvatar };
notepads: { [key: string]: ReplicatedNotepad };
cards: { [key: string]: ReplicatedCard };
decks: { [key: string]: ReplicatedDeck };
privateAreas: { [key: string]: ReplicatedPrivateArea };
}
class | {
/** Replicated game properties */
public tick: number = 0;
public boardId: string = "";
public gameId: string = "";
public playerId: string = "";
public playerName: string = "";
public clientId: string = "";
/** Scene graph */
public avatars: { [key: string]: Avatar } = {};
public boards: { [key: string]: Board } = {};
public marbles: { [key: string]: Marble } = {};
public notepads: { [key: string]: Notepad } = {};
public privateAreas: { [key: string]: PrivateArea } = {};
public decks: { [key: string]: Deck } = {};
public cards: { [key: string]: Card } = {};
/** Computed properties */
public cardsOnDeck: { [key: string]: Card[] } = {};
public playerNames: { [key: string]: string } = {};
constructor() {
//
}
/** Distribute changes from remote */
synchronize(remote: ReplicatedScene) {
// Step one tick ahead so that any changes in this tick are
// properly propagated to server.
this.tick = Math.max(this.tick, remote.tick) + 1;
// Take over settings from server
this.boardId = remote.boardId;
this.gameId = remote.gameId;
this.playerId = remote.playerId;
this.clientId = remote.clientId;
// Propagate changes to items
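// For each entity type: create the local item if it does not exist yet,
// then apply the replicated state.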
for (const [key, replica] of Object.entries(remote.boards)) {
let item = this.boards[key];
if (item === undefined) {
item = new Board(key, replica, this);
this.boards[key] = item;
}
item.synchronize(replica);
}
for (const [key, replica] of Object.entries(remote.notepads)) {
let item = this.notepads[key];
if (item === undefined) {
item = new Notepad(key, replica, this);
this.notepads[key] = item;
}
item.synchronize(replica);
}
for (const [key, replica] of Object.entries(remote.avatars)) {
let item = this.avatars[key];
if (item === undefined) {
item = new Avatar(key, replica, this);
this.avatars[key] = item;
}
item.synchronize(replica);
}
for (const [key, replica] of Object.entries(remote.privateAreas)) {
let item = this.privateAreas[key];
if (item === undefined) {
item = new PrivateArea(key, replica, this);
this.privateAreas[key] = item;
}
item.synchronize(replica);
}
for (const [key, replica] of Object.entries(remote.cards)) {
let item = this.cards[key];
if (item === undefined) {
item = new Card(key, replica, this);
this.cards[key] = item;
}
item.synchronize(replica);
}
for (const [key, replica] of Object.entries(remote.decks)) {
let item = this.decks[key];
if (item === undefined) {
item = new Deck(key, replica, this);
this.decks[key] = item;
}
item.synchronize(replica);
}
for (const [key, replica] of Object.entries(remote.marbles)) {
let item = this.marbles[key];
if (item === undefined) {
item = new Marble(key, replica, this);
this.marbles[key] = item;
}
item.synchronize(replica);
}
}
layout() {
window.requestAnimationFrame(this._layout.bind(this));
}
private _layout(this: Scene) {
// Compute dependant properties
this.cardsOnDeck = {};
for (const [key, _] of Object.entries(this.decks)) {
this.cardsOnDeck[key] = [];
}
for (const [_, card] of Object.entries(this.cards)) {
if (card.replica.onDeck !== null) {
const cards = this.cardsOnDeck[card.replica.onDeck];
if (cards) {
cards.push(card);
}
}
}
// Discover names of players and assign an avatar to the player
this.playerNames = {};
for (const [_, avatar] of Object.entries(this.avatars)) {
if (avatar.replica.represents !== null) {
this.playerNames[avatar.replica.represents] = avatar.replica.text;
}
}
if (this.playerNames[this.playerId] === undefined) {
for (const [_, avatar] of Object.entries(this.avatars)) {
if (avatar.replica.represents === null) {
avatar.replica.tick = this.tick;
avatar.replica.owner = this.playerId;
avatar.replica.represents = this.playerId;
if (this.playerName === "") {
this.playerName = avatar.replica.text;
} else {
avatar.replica.text = this.playerName;
}
this.playerNames[this.playerId] = this.playerName;
break;
}
}
}
// Propagate new computed properties
for (const [_, item] of Object.entries(this.boards)) {
item.layoutByScene(0);
}
for (const [_, item] of Object.entries(this.notepads)) {
item.layoutByScene(1);
}
for (const [_, item] of Object.entries(this.avatars)) {
item.layoutByScene(10);
}
for (const [_, item] of Object.entries(this.privateAreas)) {
item.layoutByScene(100);
}
for (const [_, item] of Object.entries(this.decks)) {
item.layoutByScene(20000);
}
for (const [_, item] of Object.entries(this.cards)) {
item.layoutByScene(20000);
}
for (const [_, item] of Object.entries(this.marbles)) {
item.layoutByScene(30000);
}
}
differences(): ReplicatedScene {
const result: ReplicatedScene = {
tick: this.tick,
boardId: this.boardId,
gameId: this.gameId,
playerId: this.playerId,
clientId: this.clientId,
avatars: {},
boards: {},
marbles: {},
notepads: {},
privateAreas: {},
decks: {},
cards: {},
};
for (const [key, item] of Object.entries(this.avatars)) {
const changes = item.changes();
if (changes !== null) {
result.avatars[key] = changes;
}
}
for (const [key, item] of Object.entries(this.boards)) {
const changes = item.changes();
if (changes !== null) {
result.boards[key] = changes;
}
}
for (const [key, item] of Object.entries(this.marbles)) {
const changes = item.changes();
if (changes !== null) {
result.marbles[key] = changes;
}
}
for (const [key, item] of Object.entries(this.notepads)) {
const changes = item.changes();
if (changes !== null) {
result.notepads[key] = changes;
}
}
for (const [key, item] of Object.entries(this.privateAreas)) {
const changes = item.changes();
if (changes !== null) {
result.privateAreas[key] = changes;
}
}
for (const [key, item] of Object.entries(this.decks)) {
const changes = item.changes();
if (changes !== null) {
result.decks[key] = changes;
}
}
for (const [key, item] of Object.entries(this.cards)) {
const changes = item.changes();
if (changes !== null) {
result.cards[key] = changes;
}
}
return result;
}
createDeck(ref: Card): Deck {
for (let i = 0; i < 1000; i++) {
const key = "deck" + i;
if (!this.decks.hasOwnProperty(key)) {
const deck = Deck.fromCard(key, ref, this);
this.decks[key] = deck;
return deck;
}
}
throw new Error("Ids for decks exhausted.");
}
topZ(): number {
let z: number = 0;
// TODO: Items themselves should say how much z space they
// require; for now the amounts are hard-coded statically here.
for (const [_, deck] of Object.entries(this.decks)) {
if (deck.replica.z + 100 > z) {
z = deck.replica.z + 100;
}
}
for (const [_, card] of Object.entries(this.cards)) {
if (card.replica.z > z) {
z = card.replica.z;
}
}
for (const [_, marble] of Object.entries(this.marbles)) {
if (marble.replica.z > z) {
z = marble.replica.z;
}
}
return z;
}
pixelsOverlap(a: Card | Deck, b: Card | PrivateArea): number {
const h =
Math.min(a.box.x + a.box.w, b.box.x + b.box.w) -
Math.max(a.box.x, b.box.x);
const v =
Math.min(a.box.y + a.box.h, b.box.y + b.box.h) -
Math.max(a.box.y, b.box.y);
return Math.max(0, h) * Math.max(0, v);
}
overlapsPrivateArea(card: Card): PrivateArea | null {
for (const [_, area] of Object.entries(this.privateAreas)) {
if (this.pixelsOverlap(card, area) > 0) {
return area;
}
}
return null;
}
overlapsCard(card: Card | Deck): Card | null {
let largest: Card | null = null;
let pixels: number = 500;
for (const [_, other] of Object.entries(this.cards)) {
if (other === card) {
continue;
}
const priv = this.overlapsPrivateArea(other);
const area = this.pixelsOverlap(card, other);
let ownsOther: boolean = false;
if (other.replica.onDeck === null) {
ownsOther = other.replica.owner === card.replica.owner;
} else {
ownsOther =
this.decks[other.replica.onDeck]?.replica.owner ===
card.replica.owner;
}
if (area > pixels && ((priv && ownsOther) || !priv)) {
pixels = area;
largest = other;
}
}
return largest;
}
}
| Scene |
HomePartners.tsx | import * as React from 'react';
import { useTranslation } from 'react-i18next';
import styles from './homePartners.module.scss';
import Partners from './PartnerLogoList';
import { mainPartnerList, partnerList } from './constants/PartnersConstants';
const HomePartners: React.FunctionComponent = props => {
const { t } = useTranslation();
return (
<section className={styles.wrapper}> | <Partners size="small" partners={partnerList} />
</div>
</div>
</section>
);
};
export default HomePartners; | <div className={styles.innerwrapper}>
<h2>{t('home.partners.heading.text')}</h2>
<div className={styles.partners}>
<Partners size="big" partners={mainPartnerList} /> |
facade.rs | // Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use anyhow::Error;
use fuchsia_syslog::macros::*;
/// Perform Logging operations.
///
/// Note this object is shared among all threads created by server.
///
#[derive(Debug)]
pub struct | {}
impl LoggingFacade {
pub fn new() -> LoggingFacade {
LoggingFacade {}
}
pub async fn log_err(&self, message: String) -> Result<(), Error> {
fx_log_err!("{:?}", message);
Ok(())
}
pub async fn log_info(&self, message: String) -> Result<(), Error> {
fx_log_info!("{:?}", message);
Ok(())
}
pub async fn log_warn(&self, message: String) -> Result<(), Error> {
fx_log_warn!("{:?}", message);
Ok(())
}
}
| LoggingFacade |
image.service.ts | import { ConflictException, HttpException, HttpStatus, Injectable } from '@nestjs/common';
import { IImage } from './Interface/image.interface'
import { PushImageDto } from './dto/push-image.dto'
@Injectable()
export class | {
private DB: IImage[]
constructor() {
this.DB = [
{id: 1, name: 'ghoul', url: 'https://ghoul.anime/image', description: 'true fan of ghouls'} // the link is made up :)
]
}
async findAll(){
return this.DB
}
async findImageByID(id: number){
const image = this.DB.find((item: IImage) => item.id === +id)
if(!image){
throw new HttpException('Image is not found', HttpStatus.NOT_FOUND)
}
return image
}
async pushToDB(dto: PushImageDto): Promise<IImage>{
const resultate = this.DB.find(image => image.url === dto.url)
if(resultate){
throw new ConflictException('Image already exist')
}
dto.id = this.DB.length +1
this.DB.push(dto)
return dto
}
async updateImage(id: number, dto: PushImageDto){
const image = this.DB.find((item: IImage) => item.id === +id)
if(!image){
throw new HttpException('Image is not found', HttpStatus.NOT_FOUND)
}
const newUrl = this.DB.find(image => image.url === dto.url)
if(newUrl){
throw new ConflictException('Image already exist')
}
image.name = dto.name
image.url = dto.url
image.description = dto.description
// image is a reference into this.DB, so the in-place updates above are already persisted
return image
}
async deleteImage(id: number){
const image = this.DB.find((item: IImage) => item.id === +id)
if(!image){
throw new HttpException('Image is not found', HttpStatus.NOT_FOUND)
}
const indexOfImage = this.DB.findIndex(item => item === image)
const deleted = this.DB.splice(indexOfImage, 1)
if(!deleted.length){
throw new HttpException('Image is not deleted', HttpStatus.INTERNAL_SERVER_ERROR)
}
return 'Image deleted'
}
}
| ImageService |
Strike.py | import asyncio
import discord
import time
import parsedatetime
from datetime import datetime
from operator import itemgetter
from discord.ext import commands
from Cogs import ReadableTime
from Cogs import DisplayName
from Cogs import Nullify
def setup(bot):
# Add the bot and deps
settings = bot.get_cog("Settings")
mute = bot.get_cog("Mute")
bot.add_cog(Strike(bot, settings, mute))
# This is the Strike module. It keeps track of warnings and kicks/bans accordingly
# Strikes = [ time until drops off ]
# StrikeOut = 3 (3 strikes and you're out)
# StrikeLevel (a list similar to xproles)
# Standard strike roles:
# 0 = Not been punished already
# 1 = Muted for x amount of time
# 2 = Already been kicked (id in kick list)
# 3 = Already been banned (auto-mute)
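# Escalation example: a member at StrikeLevel 0 who reaches StrikeOut strikes
# is muted for a day and moved to level 1; striking out at level 1 kicks them
# (level 2); striking out again bans them (level 3).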
class Strike(commands.Cog):
# Init with the bot reference, and a reference to the settings var
def __init__(self, bot, settings, mute):
self.bot = bot
self.settings = settings
self.mute = mute
self.loop_list = []
global Utils, DisplayName
Utils = self.bot.get_cog("Utils")
DisplayName = self.bot.get_cog("DisplayName")
async def onjoin(self, member, server):
# Check id against the kick and ban list and react accordingly
kickList = self.settings.getServerStat(server, "KickList")
if str(member.id) in kickList:
# The user has been kicked before - set their strikeLevel to 2
self.settings.setUserStat(member, server, "StrikeLevel", 2)
banList = self.settings.getServerStat(server, "BanList")
if str(member.id) in banList:
# The user has been kicked before - set their strikeLevel to 3
# Also mute them
self.settings.setUserStat(member, server, "StrikeLevel", 3)
self.settings.setUserStat(member, server, "Muted", True)
self.settings.setUserStat(member, server, "Cooldown", None)
await self.mute._mute(member, server)
# Proof of concept stuff for reloading cog/extension
def _is_submodule(self, parent, child):
return parent == child or child.startswith(parent + ".")
@commands.Cog.listener()
async def on_unloaded_extension(self, ext):
# Called to shut things down
if not self._is_submodule(ext.__name__, self.__module__):
return
for task in self.loop_list:
task.cancel()
@commands.Cog.listener()
async def on_loaded_extension(self, ext):
# See if we were loaded
if not self._is_submodule(ext.__name__, self.__module__):
return
self.bot.loop.create_task(self.start_loading())
async def start_loading(self):
await self.bot.wait_until_ready()
await self.bot.loop.run_in_executor(None, self.check_strikes)
def check_strikes(self):
# Check all strikes - and start timers
print("Checking strikes...")
t = time.time()
for server in self.bot.guilds:
for member in server.members:
strikes = self.settings.getUserStat(member, server, "Strikes")
if strikes == None:
continue
if len(strikes):
# We have a list
for strike in strikes:
# Make sure it's a strike that *can* roll off
if not strike['Time'] == -1:
self.loop_list.append(self.bot.loop.create_task(
self.checkStrike(member, strike)))
print("Strikes checked - took {} seconds.".format(time.time() - t))
async def checkStrike(self, member, strike):
# Start our countdown
countDown = int(strike['Time'])-int(time.time())
if countDown > 0:
# We have a positive countdown - let's wait
await asyncio.sleep(countDown)
strikes = self.settings.getUserStat(member, member.guild, "Strikes")
# Verify strike is still valid
if not strike in strikes:
return
strikes.remove(strike)
self.settings.setUserStat(member, member.guild, "Strikes", strikes)
@commands.command(pass_context=True)
async def strike(self, ctx, member: discord.Member = None, days=None, *, message: str = None):
"""Give a user a strike (bot-admin only)."""
isAdmin = ctx.message.author.permissions_in(
ctx.message.channel).administrator
if not isAdmin:
checkAdmin = self.settings.getServerStat(
ctx.message.guild, "AdminArray")
for role in ctx.message.author.roles:
for aRole in checkAdmin:
# Get the role that corresponds to the id
if str(aRole['ID']) == str(role.id):
isAdmin = True
# Only allow admins to change server stats
if not isAdmin:
await ctx.channel.send('You do not have sufficient privileges to access this command.')
return
if member == None:
msg = 'Usage: `{}strike [member] [strike timeout (in days) - 0 = forever] [message (optional)]`'.format(
ctx.prefix)
await ctx.channel.send(msg)
return
# Check if we're striking ourselves
if member.id == ctx.message.author.id:
# We're giving ourselves a strike?
await ctx.channel.send('You can\'t give yourself a strike, silly.')
return
# Check if the bot is getting the strike
if member.id == self.bot.user.id:
await ctx.channel.send('I can\'t do that, *{}*.'.format(DisplayName.name(ctx.message.author)))
return
# Check if we're striking another admin/bot-admin
isAdmin = member.permissions_in(ctx.message.channel).administrator
if not isAdmin:
checkAdmin = self.settings.getServerStat(
ctx.message.guild, "AdminArray")
for role in member.roles:
for aRole in checkAdmin:
# Get the role that corresponds to the id
if str(aRole['ID']) == str(role.id):
isAdmin = True
if isAdmin:
await ctx.channel.send('You can\'t give other admins/bot-admins strikes, bub.')
return
# Check if days is an int - otherwise assume it's part of the message
try:
days = int(days)
except Exception:
if not days == None:
if message == None:
message = days
else:
message = days + ' ' + message
days = 0
# If it's not at least a day, it's forever
if days < 1:
days = -1
currentTime = int(time.time())
# Build our Strike
strike = {}
if days == -1:
strike['Time'] = -1
else:
strike['Time'] = currentTime+(86400*days)
self.loop_list.append(self.bot.loop.create_task(
self.checkStrike(member, strike)))
strike['Message'] = message
strike['GivenBy'] = ctx.message.author.id
strikes = self.settings.getUserStat(
member, ctx.message.guild, "Strikes")
strikeout = int(self.settings.getServerStat(
ctx.message.guild, "StrikeOut"))
strikeLevel = int(self.settings.getUserStat(
member, ctx.message.guild, "StrikeLevel"))
strikes.append(strike)
self.settings.setUserStat(
member, ctx.message.guild, "Strikes", strikes)
strikeNum = len(strikes)
# Set up consequences
if strikeLevel == 0:
consequence = '**muted for a day**.'
elif strikeLevel == 1:
consequence = '**kicked**.'
else:
consequence = '**banned**.'
# Check if we've struck out
if strikeNum < strikeout:
# We haven't struck out yet
msg = '*{}* has just received *strike {}*. *{}* more and they will be {}'.format(
DisplayName.name(member), strikeNum, strikeout-strikeNum, consequence)
else:
# We struck out - let's evaluate
if strikeLevel == 0:
cooldownFinal = currentTime+86400
checkRead = ReadableTime.getReadableTimeBetween(
currentTime, cooldownFinal)
if message:
mutemessage = 'You have been muted in *{}*.\nThe Reason:\n{}'.format(
Nullify.escape_all(ctx.guild.name), message)
else:
mutemessage = 'You have been muted in *{}*.'.format(
Nullify.escape_all(ctx.guild.name))
# Check if already muted
alreadyMuted = self.settings.getUserStat(
member, ctx.message.guild, "Muted")
if alreadyMuted:
# Find out for how long
muteTime = self.settings.getUserStat(
member, ctx.message.guild, "Cooldown")
if not muteTime == None:
if muteTime < cooldownFinal:
self.settings.setUserStat(
member, ctx.message.guild, "Cooldown", cooldownFinal)
timeRemains = ReadableTime.getReadableTimeBetween(
currentTime, cooldownFinal)
if message:
mutemessage = 'Your muted time in *{}* has been extended to *{}*.\nThe Reason:\n{}'.format(
Nullify.escape_all(ctx.guild.name), timeRemains, message)
else:
mutemessage = 'Your muted time in *{}* has been extended to *{}*.'.format(
Nullify.escape_all(ctx.guild.name), timeRemains)
else:
self.settings.setUserStat(
member, ctx.message.guild, "Muted", True)
self.settings.setUserStat(
member, ctx.message.guild, "Cooldown", cooldownFinal)
await self.mute._mute(member, ctx.message.guild, cooldownFinal)
await member.send(mutemessage)
elif strikeLevel == 1:
kickList = self.settings.getServerStat(
ctx.message.guild, "KickList")
if not str(member.id) in kickList:
kickList.append(str(member.id))
self.settings.setServerStat(
ctx.message.guild, "KickList", kickList)
if message:
kickmessage = 'You have been kicked from *{}*.\nThe Reason:\n{}'.format(
Nullify.escape_all(ctx.guild.name), message)
else:
kickmessage = 'You have been kicked from *{}*.'.format(
Nullify.escape_all(ctx.guild.name))
await member.send(kickmessage)
await ctx.guild.kick(member)
else:
banList = self.settings.getServerStat(
ctx.message.guild, "BanList")
if not str(member.id) in banList:
banList.append(str(member.id))
self.settings.setServerStat(
ctx.message.guild, "BanList", banList)
if message:
banmessage = 'You have been banned from *{}*.\nThe Reason:\n{}'.format(
Nullify.escape_all(ctx.guild.name), message)
else:
banmessage = 'You have been banned from *{}*.'.format(
Nullify.escape_all(ctx.guild.name))
await member.send(banmessage)
await ctx.guild.ban(member)
self.settings.incrementStat(
member, ctx.message.guild, "StrikeLevel", 1)
self.settings.setUserStat(member, ctx.message.guild, "Strikes", [])
msg = '*{}* has just received *strike {}*. They have been {}'.format(
DisplayName.name(member), strikeNum, consequence)
await ctx.channel.send(msg)
@strike.error
async def strike_error(self, ctx, error):
# do stuff
msg = 'strike Error: {}'.format(error)
await ctx.channel.send(msg)
@commands.command(pass_context=True)
async def strikes(self, ctx, *, member=None):
"""Check a your own, or another user's total strikes (bot-admin needed to check other users)."""
isAdmin = ctx.message.author.permissions_in(
ctx.message.channel).administrator
if not isAdmin:
checkAdmin = self.settings.getServerStat(
ctx.message.guild, "AdminArray")
for role in ctx.message.author.roles:
for aRole in checkAdmin:
# Get the role that corresponds to the id
if str(aRole['ID']) == str(role.id):
isAdmin = True
if member == None:
member = ctx.message.author
if type(member) is str:
memberName = member
member = DisplayName.memberForName(memberName, ctx.message.guild)
if not member:
msg = 'I couldn\'t find *{}*...'.format(
Nullify.escape_all(memberName))
await ctx.channel.send(msg)
return
# Only allow admins to check others' strikes
if not isAdmin:
if member:
if not member.id == ctx.message.author.id:
await ctx.channel.send('You are not a bot-admin. You can only see your own strikes.')
member = ctx.message.author
# Create blank embed
stat_embed = discord.Embed(color=member.color)
strikes = self.settings.getUserStat(
member, ctx.message.guild, "Strikes")
strikeout = int(self.settings.getServerStat(
ctx.message.guild, "StrikeOut"))
strikeLevel = int(self.settings.getUserStat(
member, ctx.message.guild, "StrikeLevel"))
# Add strikes, and strike level
stat_embed.add_field(name="Strikes", value=len(strikes), inline=True)
stat_embed.add_field(name="Strike Level",
value=strikeLevel, inline=True)
# Get member's avatar url
avURL = member.avatar_url
if not len(avURL):
avURL = member.default_avatar_url
if member.nick:
# We have a nickname
msg = "__***{},*** **who currently goes by** ***{}:***__\n\n".format(
member.name, member.nick)
# Add to embed
stat_embed.set_author(name='{}, who currently goes by {}'.format(
member.name, member.nick), icon_url=avURL)
else:
msg = "__***{}:***__\n\n".format(member.name)
# Add to embed
stat_embed.set_author(name='{}'.format(
member.name), icon_url=avURL)
# Get messages - and cooldowns
currentTime = int(time.time())
if not len(strikes):
# no strikes
messages = "None."
cooldowns = "None."
givenBy = "None."
else:
messages = ''
cooldowns = ''
givenBy = ''
for i in range(0, len(strikes)):
if strikes[i]['Message']:
messages += '{}. {}\n'.format(i+1, strikes[i]['Message'])
else:
messages += '{}. No message\n'.format(i+1)
timeLeft = strikes[i]['Time']
if timeLeft == -1:
cooldowns += '{}. Never rolls off\n'.format(i+1)
else:
timeRemains = ReadableTime.getReadableTimeBetween(
currentTime, timeLeft)
cooldowns += '{}. {}\n'.format(i+1, timeRemains)
given = strikes[i]['GivenBy']
givenBy += '{}. {}\n'.format(i+1, DisplayName.name(
DisplayName.memberForID(given, ctx.message.guild)))
# Add messages and cooldowns
stat_embed.add_field(name="Messages", value=messages, inline=True)
stat_embed.add_field(name="Time Left", value=cooldowns, inline=True)
stat_embed.add_field(name="Given By", value=givenBy, inline=True)
# Strikes remaining
stat_embed.add_field(name="Strikes Remaining",
value=strikeout-len(strikes), inline=True)
await ctx.channel.send(embed=stat_embed)
@commands.command(pass_context=True)
async def removestrike(self, ctx, *, member=None):
"""Removes a strike given to a member (bot-admin only)."""
isAdmin = ctx.message.author.permissions_in(
ctx.message.channel).administrator
if not isAdmin:
checkAdmin = self.settings.getServerStat(
ctx.message.guild, "AdminArray")
for role in ctx.message.author.roles:
for aRole in checkAdmin:
# Get the role that corresponds to the id
if str(aRole['ID']) == str(role.id):
isAdmin = True
# Only allow admins to change server stats
if not isAdmin:
await ctx.channel.send('You do not have sufficient privileges to access this command.')
return
if member == None:
msg = 'Usage: `{}removestrike [member]`'.format(ctx.prefix)
await ctx.channel.send(msg)
return
if type(member) is str:
memberName = member
member = DisplayName.memberForName(memberName, ctx.message.guild)
if not member:
msg = 'I couldn\'t find *{}*...'.format(
Nullify.escape_all(memberName))
await ctx.channel.send(msg)
return
# We have what we need - get the list
strikes = self.settings.getUserStat(
member, ctx.message.guild, "Strikes")
# Return if no strikes to take
if not len(strikes):
await ctx.channel.send('*{}* has no strikes to remove.'.format(DisplayName.name(member)))
return
# We have some - naughty naughty!
strikes = sorted(strikes, key=lambda x: int(x['Time']))
for strike in strikes:
# Check if we've got one that's not -1
if not strike['Time'] == -1:
# First item that isn't forever - kill it
strikes.remove(strike)
self.settings.setUserStat(
member, ctx.message.guild, "Strikes", strikes)
await ctx.channel.send('*{}* has one less strike. They are down to *{}*.'.format(DisplayName.name(member), len(strikes)))
return
# If we're here - we just remove one
del strikes[0]
self.settings.setUserStat(
member, ctx.message.guild, "Strikes", strikes)
await ctx.channel.send('*{}* has one less strike. They are down to *{}*.'.format(DisplayName.name(member), len(strikes)))
return
@commands.command(pass_context=True)
async def setstrikelevel(self, ctx, *, member=None, strikelevel: int = None):
"""Sets the strike level of the passed user (bot-admin only)."""
isAdmin = ctx.message.author.permissions_in(
ctx.message.channel).administrator
if not isAdmin:
checkAdmin = self.settings.getServerStat(
ctx.message.guild, "AdminArray")
for role in ctx.message.author.roles:
for aRole in checkAdmin:
# Get the role that corresponds to the id
if str(aRole['ID']) == str(role.id):
isAdmin = True
# Only allow admins to change server stats
if not isAdmin:
await ctx.channel.send('You do not have sufficient privileges to access this command.')
return
author = ctx.message.author
server = ctx.message.guild
channel = ctx.message.channel
usage = 'Usage: `{}setstrikelevel [member] [strikelevel]`'.format(
ctx.prefix)
if member == None:
await ctx.channel.send(usage)
return
# Check for formatting issues
if strikelevel == None:
# Either strike level wasn't set - or it's the last section
if type(member) is str:
# It' a string - the hope continues
nameCheck = DisplayName.checkNameForInt(member, server)
if not nameCheck:
await ctx.channel.send(usage)
return
if not nameCheck["Member"]:
msg = 'I couldn\'t find *{}* on the server.'.format(
Nullify.escape_all(member))
await ctx.channel.send(msg)
return
member = nameCheck["Member"]
strikelevel = nameCheck["Int"]
if strikelevel == None:
# Still no strike level
await ctx.channel.send(usage)
return
self.settings.setUserStat(
member, ctx.message.guild, "StrikeLevel", strikelevel)
msg = '*{}\'s* strike level has been set to *{}!*'.format(
DisplayName.name(member), strikelevel)
await ctx.channel.send(msg)
@commands.command(pass_context=True)
async def addkick(self, ctx, *, member=None):
"""Adds the passed user to the kick list (bot-admin only)."""
isAdmin = ctx.message.author.permissions_in(
ctx.message.channel).administrator
if not isAdmin:
checkAdmin = self.settings.getServerStat(
ctx.message.guild, "AdminArray")
for role in ctx.message.author.roles:
for aRole in checkAdmin:
# Get the role that corresponds to the id
if str(aRole['ID']) == str(role.id):
isAdmin = True
# Only allow admins to change server stats
if not isAdmin:
await ctx.channel.send('You do not have sufficient privileges to access this command.')
return
if member == None:
msg = 'Usage: `{}addkick [member]`'.format(ctx.prefix)
await ctx.channel.send(msg)
return
if type(member) is str:
memberName = member
member = DisplayName.memberForName(memberName, ctx.message.guild)
if not member:
msg = 'I couldn\'t find *{}*...'.format(
Nullify.escape_all(memberName))
await ctx.channel.send(msg)
return
msg = ''
kickList = self.settings.getServerStat(ctx.message.guild, "KickList")
if not str(member.id) in kickList:
kickList.append(str(member.id))
| DisplayName.name(member))
else:
msg = '*{}* is already in the kick list.'.format(
DisplayName.name(member))
await ctx.channel.send(msg)
@commands.command(pass_context=True)
async def removekick(self, ctx, *, member=None):
"""Removes the passed user from the kick list (bot-admin only)."""
isAdmin = ctx.message.author.permissions_in(
ctx.message.channel).administrator
if not isAdmin:
checkAdmin = self.settings.getServerStat(
ctx.message.guild, "AdminArray")
for role in ctx.message.author.roles:
for aRole in checkAdmin:
# Get the role that corresponds to the id
if str(aRole['ID']) == str(role.id):
isAdmin = True
# Only allow admins to change server stats
if not isAdmin:
await ctx.channel.send('You do not have sufficient privileges to access this command.')
return
if member == None:
msg = 'Usage: `{}removekick [member]`'.format(ctx.prefix)
await ctx.channel.send(msg)
return
if type(member) is str:
memberName = member
member = DisplayName.memberForName(memberName, ctx.message.guild)
if not member:
msg = 'I couldn\'t find *{}*...'.format(
Nullify.escape_all(memberName))
await ctx.channel.send(msg)
return
msg = ''
kickList = self.settings.getServerStat(ctx.message.guild, "KickList")
if str(member.id) in kickList:
kickList.remove(str(member.id))
self.settings.setServerStat(
ctx.message.guild, "KickList", kickList)
msg = '*{}* was removed from the kick list.'.format(
DisplayName.name(member))
else:
msg = '*{}* was not found in the kick list.'.format(
DisplayName.name(member))
await ctx.channel.send(msg)
@commands.command(pass_context=True)
async def addban(self, ctx, *, member=None):
"""Adds the passed user to the ban list (bot-admin only)."""
isAdmin = ctx.message.author.permissions_in(
ctx.message.channel).administrator
if not isAdmin:
checkAdmin = self.settings.getServerStat(
ctx.message.guild, "AdminArray")
for role in ctx.message.author.roles:
for aRole in checkAdmin:
# Get the role that corresponds to the id
if str(aRole['ID']) == str(role.id):
isAdmin = True
# Only allow admins to change server stats
if not isAdmin:
await ctx.channel.send('You do not have sufficient privileges to access this command.')
return
if member == None:
msg = 'Usage: `{}addban [member]`'.format(ctx.prefix)
await ctx.channel.send(msg)
return
if type(member) is str:
memberName = member
member = DisplayName.memberForName(memberName, ctx.message.guild)
if not member:
msg = 'I couldn\'t find *{}*...'.format(
Nullify.escape_all(memberName))
await ctx.channel.send(msg)
return
msg = ''
banList = self.settings.getServerStat(ctx.message.guild, "BanList")
if not str(member.id) in banList:
banList.append(str(member.id))
self.settings.setServerStat(ctx.message.guild, "BanList", banList)
msg = '*{}* was added to the ban list.'.format(
DisplayName.name(member))
else:
msg = '*{}* is already in the ban list.'.format(
DisplayName.name(member))
await ctx.channel.send(msg)
@commands.command(pass_context=True)
async def removeban(self, ctx, *, member=None):
"""Removes the passed user from the ban list (bot-admin only)."""
isAdmin = ctx.message.author.permissions_in(
ctx.message.channel).administrator
if not isAdmin:
checkAdmin = self.settings.getServerStat(
ctx.message.guild, "AdminArray")
for role in ctx.message.author.roles:
for aRole in checkAdmin:
# Get the role that corresponds to the id
if str(aRole['ID']) == str(role.id):
isAdmin = True
# Only allow admins to change server stats
if not isAdmin:
await ctx.channel.send('You do not have sufficient privileges to access this command.')
return
if member == None:
msg = 'Usage: `{}removeban [member]`'.format(ctx.prefix)
await ctx.channel.send(msg)
return
if type(member) is str:
memberName = member
member = DisplayName.memberForName(memberName, ctx.message.guild)
if not member:
msg = 'I couldn\'t find *{}*...'.format(
Nullify.escape_all(memberName))
await ctx.channel.send(msg)
return
msg = ''
banList = self.settings.getServerStat(ctx.message.guild, "BanList")
if str(member.id) in banList:
banList.remove(str(member.id))
self.settings.setServerStat(ctx.message.guild, "BanList", banList)
msg = '*{}* was removed from the ban list.'.format(
DisplayName.name(member))
else:
msg = '*{}* was not found in the ban list.'.format(
DisplayName.name(member))
await ctx.channel.send(msg)
@commands.command(pass_context=True)
async def iskicked(self, ctx, *, member=None):
"""Lists whether the user is in the kick list."""
if member == None:
member = ctx.message.author
if type(member) is str:
memberName = member
member = DisplayName.memberForName(memberName, ctx.message.guild)
if not member:
msg = 'I couldn\'t find *{}*...'.format(
Nullify.escape_all(memberName))
await ctx.channel.send(msg)
return
kickList = self.settings.getServerStat(ctx.message.guild, "KickList")
if str(member.id) in kickList:
msg = '*{}* is in the kick list.'.format(DisplayName.name(member))
else:
msg = '*{}* is **not** in the kick list.'.format(
DisplayName.name(member))
await ctx.channel.send(msg)
@commands.command(pass_context=True)
async def isbanned(self, ctx, *, member=None):
"""Lists whether the user is in the ban list."""
if member == None:
member = ctx.message.author
if type(member) is str:
memberName = member
member = DisplayName.memberForName(memberName, ctx.message.guild)
if not member:
msg = 'I couldn\'t find *{}*...'.format(
Nullify.escape_all(memberName))
await ctx.channel.send(msg)
return
banList = self.settings.getServerStat(ctx.message.guild, "BanList")
if str(member.id) in banList:
msg = '*{}* is in the ban list.'.format(DisplayName.name(member))
else:
msg = '*{}* is **not** in the ban list.'.format(
DisplayName.name(member))
await ctx.channel.send(msg)
@commands.command(pass_context=True)
async def strikelimit(self, ctx):
"""Lists the number of strikes before advancing to the next consequence."""
strikeout = int(self.settings.getServerStat(
ctx.message.guild, "StrikeOut"))
msg = '*{}* strikes are required to strike out.'.format(strikeout)
await ctx.channel.send(msg)
@commands.command(pass_context=True)
async def setstrikelimit(self, ctx, limit=None):
"""Sets the number of strikes before advancing to the next consequence (bot-admin only)."""
isAdmin = ctx.message.author.permissions_in(
ctx.message.channel).administrator
if not isAdmin:
checkAdmin = self.settings.getServerStat(
ctx.message.guild, "AdminArray")
for role in ctx.message.author.roles:
for aRole in checkAdmin:
# Get the role that corresponds to the id
if str(aRole['ID']) == str(role.id):
isAdmin = True
# Only allow admins to change server stats
if not isAdmin:
await ctx.channel.send('You do not have sufficient privileges to access this command.')
return
if not limit:
await ctx.channel.send('Strike limit must be *at least* one.')
return
try:
limit = int(limit)
except Exception:
await ctx.channel.send('Strike limit must be an integer.')
return
self.settings.setServerStat(ctx.message.guild, "StrikeOut", limit)
msg = '*{}* strikes are now required to strike out.'.format(limit)
await ctx.channel.send(msg)
@setstrikelimit.error
async def setstrikelimit_error(self, ctx, error):
# do stuff
        msg = 'setstrikelimit Error: {}'.format(error)
        await ctx.channel.send(msg) | self.settings.setServerStat(
ctx.message.guild, "KickList", kickList)
msg = '*{}* was added to the kick list.'.format(
|
random_test.py | # Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for haiku._src.random."""
import functools
from absl.testing import absltest
from haiku._src import base
from haiku._src import random
from haiku._src import transform
import jax
from jax import prng
import jax.numpy as jnp
import numpy as np
class RandomTest(absltest.TestCase):
def test_optimize_rng_splitting(self):
def | ():
k1 = base.next_rng_key()
k2 = base.next_rng_key()
return k1, k2
key = jax.random.PRNGKey(42)
assert_allclose = functools.partial(np.testing.assert_allclose, atol=1e-5)
# With optimize_rng_use the keys returned should be equal to split(n).
f_opt = transform.transform(random.optimize_rng_use(f))
jax.tree_multimap(assert_allclose,
f_opt.apply({}, key),
tuple(jax.random.split(key, 3))[1:])
# Without optimize_rng_use the keys should be equivalent to splitting in a
# loop.
f = transform.transform(f)
jax.tree_multimap(assert_allclose,
f.apply({}, key),
tuple(split_for_n(key, 2)))
def test_rbg_default_impl(self):
with jax.default_prng_impl("rbg"):
key = jax.random.PRNGKey(42)
self.assertEqual(key.shape, (4,))
_, apply = transform.transform(base.next_rng_key)
out_key = apply({}, key)
self.assertEqual(out_key.shape, (4,))
class CustomRNGTest(absltest.TestCase):
def setUp(self):
super().setUp()
jax.config.update("jax_enable_custom_prng", True)
def tearDown(self):
super().tearDown()
jax.config.update("jax_enable_custom_prng", False)
def test_custom_key(self):
count = 0
def count_splits(_, num):
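      # Stub `split` that counts how many times the key is split and returns
      # dummy keys with the custom (13,) key shape.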
nonlocal count
count += 1
return jnp.zeros((num, 13), np.uint32)
differently_shaped_prng_impl = prng.PRNGImpl(
# Testing a different key shape to make sure it's accepted by Haiku
key_shape=(13,),
seed=lambda _: jnp.zeros((13,), np.uint32),
split=count_splits,
random_bits=lambda *_, data: jnp.zeros(data, np.uint32),
fold_in=lambda key, _: key)
init, _ = transform.transform(base.next_rng_key)
key = prng.seed_with_impl(differently_shaped_prng_impl, 42)
init(key)
self.assertEqual(count, 1)
# testing if Tracers with a different key shape are accepted
jax.jit(init)(key)
self.assertEqual(count, 2)
def split_for_n(key, n):
for _ in range(n):
key, subkey = jax.random.split(key)
yield subkey
if __name__ == "__main__":
absltest.main()
| f |
error.rs | // OpenTimestamps Library
// Written in 2017 by
// Andrew Poelstra <[email protected]>
//
// To the extent possible under law, the author(s) have dedicated all
// copyright and related and neighboring rights to this software to
// the public domain worldwide. This software is distributed without
// any warranty.
//
// You should have received a copy of the CC0 Public Domain Dedication
// along with this software.
// If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
//
//! # Errors
//!
//! Library-wide error type and associated boilerplate
//!
use std::error;
use std::{fmt, io};
use std::string::FromUtf8Error;
/// Library-wide error structure
#[allow(missing_docs)]
#[derive(Debug)]
pub enum Error {
/// Recursed too deeply
StackOverflow,
/// A URI had a character we don't like
InvalidUriChar(char),
/// A digest type tag was not recognized
BadDigestTag(u8),
/// Decoded an op tag that we don't recognize
BadOpTag(u8),
/// OTS file began with invalid magic bytes
BadMagic(Vec<u8>),
/// OTS file has version we don't understand
BadVersion(usize),
/// A byte vector had an invalid length
BadLength { min: usize, max: usize, val: usize },
/// Expected EOF but didn't get it
TrailingBytes,
/// UTF8
Utf8(FromUtf8Error),
/// I/O error
Io(io::Error)
}
impl From<FromUtf8Error> for Error {
fn | (e: FromUtf8Error) -> Error {
Error::Utf8(e)
}
}
impl From<io::Error> for Error {
fn from(e: io::Error) -> Error {
Error::Io(e)
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::StackOverflow => f.write_str("recursion limit reached"),
Error::InvalidUriChar(c) => write!(f, "invalid character `{}` in URI", c),
Error::BadDigestTag(t) => write!(f, "invalid digest tag 0x{:02x}", t),
Error::BadOpTag(t) => write!(f, "invalid op tag 0x{:02x}", t),
Error::BadMagic(ref x) => write!(f, "bad magic bytes `{:?}`, is this a timestamp file?", x),
Error::BadVersion(v) => write!(f, "version {} timestamps not understood", v),
Error::BadLength { min, max, val } => write!(f, "length {} should be between {} and {} inclusive", val, min, max),
Error::TrailingBytes => f.write_str("expected eof not"), // lol
Error::Utf8(ref e) => fmt::Display::fmt(e, f),
Error::Io(ref e) => fmt::Display::fmt(e, f)
}
}
}
impl error::Error for Error {
fn description(&self) -> &str {
match *self {
Error::StackOverflow => "recursion limit reached",
Error::InvalidUriChar(_) => "invalid character in URI",
Error::BadDigestTag(_) => "invalid digest tag",
Error::BadOpTag(_) => "invalid op tag",
Error::BadMagic(_) => "bad magic bytes, is this a timestamp file?",
Error::BadVersion(_) => "timestamp version not understood",
Error::BadLength { .. } => "length out of bounds",
Error::TrailingBytes => "expected eof not",
Error::Utf8(ref e) => error::Error::description(e),
Error::Io(ref e) => error::Error::description(e)
}
}
fn cause(&self) -> Option<&error::Error> {
match *self {
Error::Utf8(ref e) => Some(e),
Error::Io(ref e) => Some(e),
_ => None
}
}
}
| from |
richtext.go | package main
import (
"fmt"
"github.com/parsiya/Parsia-Code/markdown-parsing/parse"
)
var someRichText = `
This is line one.
This is line two.
This is a list:
* item1
* item2
`
func | () {
fmt.Println(parse.RichText(someRichText))
}
| main |
data.py | import logging
import mimetypes
import os
import shutil
import tempfile
import zipfile
from cgi import escape
from inspect import isclass
import metadata
from galaxy import util
from galaxy.datatypes.metadata import MetadataElement # import directly to maintain ease of use in Datatype class definitions
from galaxy.util import inflector
from galaxy.util.bunch import Bunch
from galaxy.util.odict import odict
from galaxy.util.sanitize_html import sanitize_html
import dataproviders
import paste
XSS_VULNERABLE_MIME_TYPES = [
'image/svg+xml', # Unfiltered by Galaxy and may contain JS that would be executed by some browsers.
'application/xml', # Some browsers will evalute SVG embedded JS in such XML documents.
]
DEFAULT_MIME_TYPE = 'text/plain' # Vulnerable mime types will be replaced with this.
log = logging.getLogger(__name__)
# Valid first column and strand column values for bed, other formats
col1_startswith = ['chr', 'chl', 'groupun', 'reftig_', 'scaffold', 'super_', 'vcho']
valid_strand = ['+', '-', '.']
class DataMeta( type ):
"""
Metaclass for Data class. Sets up metadata spec.
"""
def __init__( cls, name, bases, dict_ ):
cls.metadata_spec = metadata.MetadataSpecCollection()
for base in bases: # loop through bases (class/types) of cls
if hasattr( base, "metadata_spec" ): # base of class Data (object) has no metadata
cls.metadata_spec.update( base.metadata_spec ) # add contents of metadata spec of base class to cls
metadata.Statement.process( cls )
@dataproviders.decorators.has_dataproviders
class Data( object ):
"""
Base class for all datatypes. Implements basic interfaces as well
as class methods for metadata.
>>> class DataTest( Data ):
... MetadataElement( name="test" )
...
>>> DataTest.metadata_spec.test.name
'test'
>>> DataTest.metadata_spec.test.desc
'test'
>>> type( DataTest.metadata_spec.test.param )
<class 'galaxy.datatypes.metadata.MetadataParameter'>
"""
edam_format = "format_1915"
# Data is not chunkable by default.
CHUNKABLE = False
#: dictionary of metadata fields for this datatype::
metadata_spec = None
__metaclass__ = DataMeta
# Add metadata elements
MetadataElement( name="dbkey", desc="Database/Build", default="?", param=metadata.DBKeyParameter, multiple=False, no_value="?" )
# Stores the set of display applications, and viewing methods, supported by this datatype
supported_display_apps = {}
# If False, the peek is regenerated whenever a dataset of this type is copied
copy_safe_peek = True
# The dataset contains binary data --> do not space_to_tab or convert newlines, etc.
# Allow binary file uploads of this type when True.
is_binary = True
# Allow user to change between this datatype and others. If False, this datatype
# cannot be changed from or into.
allow_datatype_change = True
# Composite datatypes
composite_type = None
composite_files = odict()
primary_file_name = 'index'
# A per datatype setting (inherited): max file size (in bytes) for setting optional metadata
_max_optional_metadata_filesize = None
# Trackster track type.
track_type = None
# Data sources.
data_sources = {}
def __init__(self, **kwd):
"""Initialize the datatype"""
object.__init__(self, **kwd)
self.supported_display_apps = self.supported_display_apps.copy()
self.composite_files = self.composite_files.copy()
self.display_applications = odict()
def write_from_stream(self, dataset, stream):
"""Writes data from a stream"""
fd = open(dataset.file_name, 'wb')
while True:
chunk = stream.read(1048576)
if not chunk:
break
            fd.write(chunk)
        fd.close()
def set_raw_data(self, dataset, data):
"""Saves the data on the disc"""
fd = open(dataset.file_name, 'wb')
        fd.write(data)
        fd.close()
def get_raw_data( self, dataset ):
"""Returns the full data. To stream it open the file_name and read/write as needed"""
try:
return file(dataset.file_name, 'rb').read(-1)
except OSError:
log.exception('%s reading a file that does not exist %s' % (self.__class__.__name__, dataset.file_name))
return ''
def dataset_content_needs_grooming( self, file_name ):
"""This function is called on an output dataset file after the content is initially generated."""
return False
def groom_dataset_content( self, file_name ):
"""This function is called on an output dataset file if dataset_content_needs_grooming returns True."""
pass
def init_meta( self, dataset, copy_from=None ):
# Metadata should be left mostly uninitialized. Dataset will
# handle returning default values when metadata is not set.
# copy_from allows metadata to be passed in that will be
# copied. (although this seems ambiguous, see
# Dataset.set_metadata. It always copies the rhs in order to
        # flag the object as modified for SQLAlchemy.)
if copy_from:
dataset.metadata = copy_from.metadata
def set_meta( self, dataset, overwrite=True, **kwd ):
"""Unimplemented method, allows guessing of metadata from contents of file"""
return True
def missing_meta( self, dataset, check=[], skip=[] ):
"""
Checks for empty metadata values, Returns True if non-optional metadata is missing
Specifying a list of 'check' values will only check those names provided; when used, optionality is ignored
Specifying a list of 'skip' items will return True even when a named metadata value is missing
"""
if check:
to_check = [ ( to_check, dataset.metadata.get( to_check ) ) for to_check in check ]
else:
to_check = dataset.metadata.items()
for key, value in to_check:
if key in skip or ( not check and dataset.metadata.spec[key].get( "optional" ) ):
continue # we skip check for optional and nonrequested values here
if not value:
return True
return False
def set_max_optional_metadata_filesize( self, max_value ):
try:
max_value = int( max_value )
except:
return
self.__class__._max_optional_metadata_filesize = max_value
def get_max_optional_metadata_filesize( self ):
rval = self.__class__._max_optional_metadata_filesize
if rval is None:
return -1
return rval
max_optional_metadata_filesize = property( get_max_optional_metadata_filesize, set_max_optional_metadata_filesize )
def set_peek( self, dataset, is_multi_byte=False ):
"""Set the peek and blurb text"""
if not dataset.dataset.purged:
dataset.peek = ''
dataset.blurb = 'data'
else:
dataset.peek = 'file does not exist'
dataset.blurb = 'file purged from disk'
def display_peek(self, dataset ):
"""Create HTML table, used for displaying peek"""
out = ['<table cellspacing="0" cellpadding="3">']
try:
if not dataset.peek:
dataset.set_peek()
data = dataset.peek
lines = data.splitlines()
for line in lines:
line = line.strip()
if not line:
continue
if isinstance(line, unicode):
out.append( '<tr><td>%s</td></tr>' % escape( line ) )
else:
out.append( '<tr><td>%s</td></tr>' % escape( unicode( line, 'utf-8' ) ) )
out.append( '</table>' )
out = "".join( out )
except Exception as exc:
out = "Can't create peek %s" % str( exc )
return out
def _archive_main_file(self, archive, display_name, data_filename):
"""Called from _archive_composite_dataset to add central file to archive.
Unless subclassed, this will add the main dataset file (argument data_filename)
to the archive, as an HTML file with its filename derived from the dataset name
(argument outfname).
Returns a tuple of boolean, string, string: (error, msg, messagetype)
"""
error, msg, messagetype = False, "", ""
archname = '%s.html' % display_name # fake the real nature of the html file
try:
archive.add(data_filename, archname)
except IOError:
error = True
log.exception("Unable to add composite parent %s to temporary library download archive" % data_filename)
msg = "Unable to create archive for download, please report this error" |
def _archive_composite_dataset( self, trans, data=None, **kwd ):
# save a composite object into a compressed archive for downloading
params = util.Params( kwd )
valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
outfname = data.name[0:150]
outfname = ''.join(c in valid_chars and c or '_' for c in outfname)
if params.do_action is None:
params.do_action = 'zip' # default
msg = util.restore_text( params.get( 'msg', '' ) )
if not data:
msg = "You must select at least one dataset"
else:
error = False
try:
if params.do_action == 'zip':
# Can't use mkstemp - the file must not exist first
tmpd = tempfile.mkdtemp()
util.umask_fix_perms( tmpd, trans.app.config.umask, 0o777, trans.app.config.gid )
tmpf = os.path.join( tmpd, 'library_download.' + params.do_action )
archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
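                    # ZipFile has no add(); alias it to write() so the code
                    # below can treat zip and tar archives uniformly, encoding
                    # archive names as CP437 for the zip format.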
archive.add = lambda x, y: archive.write( x, y.encode('CP437') )
elif params.do_action == 'tgz':
archive = util.streamball.StreamBall( 'w|gz' )
elif params.do_action == 'tbz':
archive = util.streamball.StreamBall( 'w|bz2' )
except (OSError, zipfile.BadZipFile):
error = True
log.exception( "Unable to create archive for download" )
msg = "Unable to create archive for %s for download, please report this error" % outfname
if not error:
ext = data.extension
path = data.file_name
fname = os.path.split(path)[-1]
efp = data.extra_files_path
# Add any central file to the archive,
display_name = os.path.splitext(outfname)[0]
if not display_name.endswith(ext):
display_name = '%s_%s' % (display_name, ext)
error, msg = self._archive_main_file(archive, display_name, path)[:2]
if not error:
# Add any child files to the archive,
for root, dirs, files in os.walk(efp):
for fname in files:
fpath = os.path.join(root, fname)
rpath = os.path.relpath(fpath, efp)
try:
archive.add( fpath, rpath )
except IOError:
error = True
log.exception( "Unable to add %s to temporary library download archive" % rpath)
msg = "Unable to create archive for download, please report this error"
continue
if not error:
if params.do_action == 'zip':
archive.close()
tmpfh = open( tmpf )
# CANNOT clean up - unlink/rmdir was always failing because file handle retained to return - must rely on a cron job to clean up tmp
trans.response.set_content_type( "application/x-zip-compressed" )
trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.zip"' % outfname
return tmpfh
else:
trans.response.set_content_type( "application/x-tar" )
outext = 'tgz'
if params.do_action == 'tbz':
outext = 'tbz'
trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % (outfname, outext)
archive.wsgi_status = trans.response.wsgi_status()
archive.wsgi_headeritems = trans.response.wsgi_headeritems()
return archive.stream
return trans.show_error_message( msg )
def _serve_raw(self, trans, dataset, to_ext):
trans.response.headers['Content-Length'] = int( os.stat( dataset.file_name ).st_size )
valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
fname = ''.join(c in valid_chars and c or '_' for c in dataset.name)[0:150]
trans.response.set_content_type( "application/octet-stream" ) # force octet-stream so Safari doesn't append mime extensions to filename
trans.response.headers["Content-Disposition"] = 'attachment; filename="Galaxy%s-[%s].%s"' % (dataset.hid, fname, to_ext)
return open( dataset.file_name )
def display_data(self, trans, data, preview=False, filename=None, to_ext=None, size=None, offset=None, **kwd):
""" Old display method, for transition - though still used by API and
        test framework. Datatypes should be very careful if overriding this
method and this interface between datatypes and Galaxy will likely
change.
        TODO: Document alternatives to overriding this method (data
providers?).
"""
# Relocate all composite datatype display to a common location.
composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
composite_extensions.append('html') # for archiving composite datatypes
# Prevent IE8 from sniffing content type since we're explicit about it. This prevents intentionally text/plain
# content from being rendered in the browser
trans.response.headers['X-Content-Type-Options'] = 'nosniff'
if isinstance( data, basestring ):
return data
if filename and filename != "index":
# For files in extra_files_path
file_path = trans.app.object_store.get_filename(data.dataset, extra_dir='dataset_%s_files' % data.dataset.id, alt_name=filename)
if os.path.exists( file_path ):
if os.path.isdir( file_path ):
return trans.show_error_message( "Directory listing is not allowed." ) # TODO: Reconsider allowing listing of directories?
mime = mimetypes.guess_type( file_path )[0]
if not mime:
try:
                        mime = trans.app.datatypes_registry.get_mimetype_by_extension( file_path.split( "." )[-1] )
except:
mime = "text/plain"
self._clean_and_set_mime_type( trans, mime )
return open( file_path )
else:
return paste.httpexceptions.HTTPNotFound( "Could not find '%s' on the extra files path %s." % ( filename, file_path ) )
self._clean_and_set_mime_type( trans, data.get_mime() )
trans.log_event( "Display dataset id: %s" % str( data.id ) )
from galaxy import datatypes # DBTODO REMOVE THIS AT REFACTOR
if to_ext or isinstance(data.datatype, datatypes.binary.Binary): # Saving the file, or binary file
if data.extension in composite_extensions:
return self._archive_composite_dataset( trans, data, **kwd )
else:
trans.response.headers['Content-Length'] = int( os.stat( data.file_name ).st_size )
if not to_ext:
to_ext = data.extension
valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
fname = ''.join(c in valid_chars and c or '_' for c in data.name)[0:150]
trans.response.set_content_type( "application/octet-stream" ) # force octet-stream so Safari doesn't append mime extensions to filename
trans.response.headers["Content-Disposition"] = 'attachment; filename="Galaxy%s-[%s].%s"' % (data.hid, fname, to_ext)
return open( data.file_name )
if not os.path.exists( data.file_name ):
raise paste.httpexceptions.HTTPNotFound( "File Not Found (%s)." % data.file_name )
max_peek_size = 1000000 # 1 MB
if isinstance(data.datatype, datatypes.images.Html):
max_peek_size = 10000000 # 10 MB for html
preview = util.string_as_bool( preview )
if not preview or isinstance(data.datatype, datatypes.images.Image) or os.stat( data.file_name ).st_size < max_peek_size:
if trans.app.config.sanitize_all_html and trans.response.get_content_type() == "text/html":
# Sanitize anytime we respond with plain text/html content.
# Check to see if this dataset's parent job is whitelisted
# We cannot currently trust imported datasets for rendering.
if not data.creating_job.imported and data.creating_job.tool_id in trans.app.config.sanitize_whitelist:
return open(data.file_name).read()
return sanitize_html(open( data.file_name ).read())
return open( data.file_name )
else:
trans.response.set_content_type( "text/html" )
return trans.stream_template_mako( "/dataset/large_file.mako",
truncated_data=open( data.file_name ).read(max_peek_size),
data=data)
def display_name(self, dataset):
"""Returns formatted html of dataset name"""
try:
if isinstance(dataset.name, unicode):
return escape( dataset.name )
else:
                return escape( unicode( dataset.name, 'utf-8' ) )
except:
return "name unavailable"
def display_info(self, dataset):
"""Returns formatted html of dataset info"""
try:
# Change new line chars to html
info = escape( dataset.info )
if info.find( '\r\n' ) >= 0:
info = info.replace( '\r\n', '<br/>' )
if info.find( '\r' ) >= 0:
info = info.replace( '\r', '<br/>' )
if info.find( '\n' ) >= 0:
info = info.replace( '\n', '<br/>' )
# Convert to unicode to display non-ascii characters.
if not isinstance(info, unicode):
info = unicode( info, 'utf-8')
return info
except:
return "info unavailable"
def validate(self, dataset):
"""Unimplemented validate, return no exceptions"""
return list()
def repair_methods(self, dataset):
"""Unimplemented method, returns dict with method/option for repairing errors"""
return None
def get_mime(self):
"""Returns the mime type of the datatype"""
return 'application/octet-stream'
def add_display_app( self, app_id, label, file_function, links_function ):
"""
Adds a display app to the datatype.
app_id is a unique id
label is the primary display label, e.g., display at 'UCSC'
file_function is a string containing the name of the function that returns a properly formatted display
links_function is a string containing the name of the function that returns a list of (link_name,link)
"""
self.supported_display_apps = self.supported_display_apps.copy()
self.supported_display_apps[app_id] = {'label': label, 'file_function': file_function, 'links_function': links_function}
def remove_display_app(self, app_id):
"""Removes a display app from the datatype"""
self.supported_display_apps = self.supported_display_apps.copy()
try:
del self.supported_display_apps[app_id]
except:
            log.exception('Tried to remove display app %s from datatype %s, but this display app is not declared.' % ( app_id, self.__class__.__name__ ) )
def clear_display_apps( self ):
self.supported_display_apps = {}
def add_display_application( self, display_application ):
"""New style display applications"""
assert display_application.id not in self.display_applications, 'Attempted to add a display application twice'
self.display_applications[ display_application.id ] = display_application
def get_display_application( self, key, default=None ):
return self.display_applications.get( key, default )
def get_display_applications_by_dataset( self, dataset, trans ):
rval = odict()
for key, value in self.display_applications.iteritems():
value = value.filter_by_dataset( dataset, trans )
if value.links:
rval[key] = value
return rval
def get_display_types(self):
"""Returns display types available"""
return self.supported_display_apps.keys()
def get_display_label(self, type):
"""Returns primary label for display app"""
try:
return self.supported_display_apps[type]['label']
except:
return 'unknown'
def as_display_type(self, dataset, type, **kwd):
"""Returns modified file contents for a particular display type """
try:
if type in self.get_display_types():
return getattr(self, self.supported_display_apps[type]['file_function'])(dataset, **kwd)
except:
log.exception('Function %s is referred to in datatype %s for displaying as type %s, but is not accessible' % (self.supported_display_apps[type]['file_function'], self.__class__.__name__, type) )
return "This display type (%s) is not implemented for this datatype (%s)." % ( type, dataset.ext)
def get_display_links( self, dataset, type, app, base_url, target_frame='_blank', **kwd ):
"""
Returns a list of tuples of (name, link) for a particular display type. No check on
'access' permissions is done here - if you can view the dataset, you can also save it
or send it to a destination outside of Galaxy, so Galaxy security restrictions do not
apply anyway.
"""
try:
if app.config.enable_old_display_applications and type in self.get_display_types():
return target_frame, getattr( self, self.supported_display_apps[type]['links_function'] )( dataset, type, app, base_url, **kwd )
except:
log.exception( 'Function %s is referred to in datatype %s for generating links for type %s, but is not accessible'
% ( self.supported_display_apps[type]['links_function'], self.__class__.__name__, type ) )
return target_frame, []
def get_converter_types(self, original_dataset, datatypes_registry):
"""Returns available converters by type for this dataset"""
return datatypes_registry.get_converters_by_datatype(original_dataset.ext)
def find_conversion_destination( self, dataset, accepted_formats, datatypes_registry, **kwd ):
"""Returns ( target_ext, existing converted dataset )"""
return datatypes_registry.find_conversion_destination_for_dataset_by_extensions( dataset, accepted_formats, **kwd )
def convert_dataset(self, trans, original_dataset, target_type, return_output=False, visible=True, deps=None, set_output_history=True):
"""This function adds a job to the queue to convert a dataset to another type. Returns a message about success/failure."""
converter = trans.app.datatypes_registry.get_converter_by_target_type( original_dataset.ext, target_type )
if converter is None:
raise Exception( "A converter does not exist for %s to %s." % ( original_dataset.ext, target_type ) )
# Generate parameter dictionary
params = {}
# determine input parameter name and add to params
input_name = 'input1'
for key, value in converter.inputs.items():
if deps and value.name in deps:
params[value.name] = deps[value.name]
elif value.type == 'data':
input_name = key
params[input_name] = original_dataset
# Run converter, job is dispatched through Queue
converted_dataset = converter.execute( trans, incoming=params, set_output_hid=visible, set_output_history=set_output_history)[1]
if len(params) > 0:
trans.log_event( "Converter params: %s" % (str(params)), tool_id=converter.id )
if not visible:
for value in converted_dataset.itervalues():
value.visible = False
if return_output:
return converted_dataset
return "The file conversion of %s on data %s has been added to the Queue." % (converter.name, original_dataset.hid)
# We need to clear associated files before we set metadata
# so that as soon as metadata starts to be set, e.g. implicitly converted datasets are deleted and no longer available 'while' metadata is being set, not just after
# We'll also clear after setting metadata, for backwards compatibility
def after_setting_metadata( self, dataset ):
"""This function is called on the dataset after metadata is set."""
dataset.clear_associated_files( metadata_safe=True )
def before_setting_metadata( self, dataset ):
"""This function is called on the dataset before metadata is set."""
dataset.clear_associated_files( metadata_safe=True )
def __new_composite_file( self, name, optional=False, mimetype=None, description=None, substitute_name_with_metadata=None, is_binary=False, to_posix_lines=True, space_to_tab=False, **kwds ):
kwds[ 'name' ] = name
kwds[ 'optional' ] = optional
kwds[ 'mimetype' ] = mimetype
kwds[ 'description' ] = description
kwds[ 'substitute_name_with_metadata' ] = substitute_name_with_metadata
kwds[ 'is_binary' ] = is_binary
kwds[ 'to_posix_lines' ] = to_posix_lines
kwds[ 'space_to_tab' ] = space_to_tab
return Bunch( **kwds )
def add_composite_file( self, name, **kwds ):
# self.composite_files = self.composite_files.copy()
self.composite_files[ name ] = self.__new_composite_file( name, **kwds )
def __substitute_composite_key( self, key, composite_file, dataset=None ):
if composite_file.substitute_name_with_metadata:
if dataset:
meta_value = str( dataset.metadata.get( composite_file.substitute_name_with_metadata ) )
else:
meta_value = self.spec[composite_file.substitute_name_with_metadata].default
return key % meta_value
return key
@property
def writable_files( self, dataset=None ):
files = odict()
if self.composite_type != 'auto_primary_file':
files[ self.primary_file_name ] = self.__new_composite_file( self.primary_file_name )
for key, value in self.get_composite_files( dataset=dataset ).iteritems():
files[ key ] = value
return files
def get_composite_files( self, dataset=None ):
def substitute_composite_key( key, composite_file ):
if composite_file.substitute_name_with_metadata:
if dataset:
meta_value = str( dataset.metadata.get( composite_file.substitute_name_with_metadata ) )
else:
meta_value = self.metadata_spec[ composite_file.substitute_name_with_metadata ].default
return key % meta_value
return key
files = odict()
for key, value in self.composite_files.iteritems():
files[ substitute_composite_key( key, value ) ] = value
return files
def generate_auto_primary_file( self, dataset=None ):
raise Exception( "generate_auto_primary_file is not implemented for this datatype." )
@property
def has_resolution(self):
return False
def matches_any( self, target_datatypes ):
"""
Check if this datatype is of any of the target_datatypes or is
a subtype thereof.
"""
datatype_classes = tuple( [ datatype if isclass( datatype ) else datatype.__class__ for datatype in target_datatypes ] )
return isinstance( self, datatype_classes )
def merge( split_files, output_file):
"""
        Merge files with shutil.copyfileobj(), which does not hit the
        maximum argument limitation of cat. gz and bz2 files also work.
"""
if not split_files:
raise ValueError('Asked to merge zero files as %s' % output_file)
elif len(split_files) == 1:
shutil.copyfileobj(open(split_files[0], 'rb'), open(output_file, 'wb'))
else:
fdst = open(output_file, 'wb')
for fsrc in split_files:
shutil.copyfileobj(open(fsrc, 'rb'), fdst)
fdst.close()
merge = staticmethod(merge)
def get_visualizations( self, dataset ):
"""
Returns a list of visualizations for datatype.
"""
if self.track_type:
return [ 'trackster', 'circster' ]
return []
# ------------- Dataproviders
def has_dataprovider( self, data_format ):
"""
Returns True if `data_format` is available in `dataproviders`.
"""
return data_format in self.dataproviders
def dataprovider( self, dataset, data_format, **settings ):
"""
Base dataprovider factory for all datatypes that returns the proper provider
for the given `data_format` or raises a `NoProviderAvailable`.
"""
if self.has_dataprovider( data_format ):
return self.dataproviders[ data_format ]( self, dataset, **settings )
raise dataproviders.exceptions.NoProviderAvailable( self, data_format )
@dataproviders.decorators.dataprovider_factory( 'base' )
def base_dataprovider( self, dataset, **settings ):
dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
return dataproviders.base.DataProvider( dataset_source, **settings )
@dataproviders.decorators.dataprovider_factory( 'chunk', dataproviders.chunk.ChunkDataProvider.settings )
def chunk_dataprovider( self, dataset, **settings ):
dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
return dataproviders.chunk.ChunkDataProvider( dataset_source, **settings )
@dataproviders.decorators.dataprovider_factory( 'chunk64', dataproviders.chunk.Base64ChunkDataProvider.settings )
def chunk64_dataprovider( self, dataset, **settings ):
dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
return dataproviders.chunk.Base64ChunkDataProvider( dataset_source, **settings )
def _clean_and_set_mime_type(self, trans, mime):
if mime.lower() in XSS_VULNERABLE_MIME_TYPES:
if not getattr( trans.app.config, "serve_xss_vulnerable_mimetypes", True ):
mime = DEFAULT_MIME_TYPE
trans.response.set_content_type( mime )
@dataproviders.decorators.has_dataproviders
class Text( Data ):
edam_format = "format_2330"
file_ext = 'txt'
line_class = 'line'
# Add metadata elements
MetadataElement( name="data_lines", default=0, desc="Number of data lines", readonly=True, optional=True, visible=False, no_value=0 )
def write_from_stream(self, dataset, stream):
"""Writes data from a stream"""
# write it twice for now
fd, temp_name = tempfile.mkstemp()
while True:
chunk = stream.read(1048576)
if not chunk:
break
os.write(fd, chunk)
os.close(fd)
# rewrite the file with unix newlines
fp = open(dataset.file_name, 'w')
for line in file(temp_name, "U"):
line = line.strip() + '\n'
fp.write(line)
fp.close()
def set_raw_data(self, dataset, data):
"""Saves the data on the disc"""
fd, temp_name = tempfile.mkstemp()
os.write(fd, data)
os.close(fd)
# rewrite the file with unix newlines
fp = open(dataset.file_name, 'w')
for line in file(temp_name, "U"):
line = line.strip() + '\n'
fp.write(line)
fp.close()
os.remove( temp_name )
def get_mime(self):
"""Returns the mime type of the datatype"""
return 'text/plain'
def set_meta( self, dataset, **kwd ):
"""
Set the number of lines of data in dataset.
"""
dataset.metadata.data_lines = self.count_data_lines(dataset)
def estimate_file_lines( self, dataset ):
"""
Perform a rough estimate by extrapolating number of lines from a small read.
"""
sample_size = 1048576
dataset_fh = open( dataset.file_name )
dataset_read = dataset_fh.read(sample_size)
dataset_fh.close()
sample_lines = dataset_read.count('\n')
est_lines = int(sample_lines * (float(dataset.get_size()) / float(sample_size)))
return est_lines
def count_data_lines(self, dataset):
"""
Count the number of lines of data in dataset,
skipping all blank lines and comments.
"""
data_lines = 0
for line in file( dataset.file_name ):
line = line.strip()
if line and not line.startswith( '#' ):
data_lines += 1
return data_lines
def set_peek( self, dataset, line_count=None, is_multi_byte=False, WIDTH=256, skipchars=None, line_wrap=True ):
"""
Set the peek. This method is used by various subclasses of Text.
"""
if not dataset.dataset.purged:
# The file must exist on disk for the get_file_peek() method
dataset.peek = get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte, WIDTH=WIDTH, skipchars=skipchars, line_wrap=line_wrap )
if line_count is None:
# See if line_count is stored in the metadata
if dataset.metadata.data_lines:
dataset.blurb = "%s %s" % ( util.commaify( str(dataset.metadata.data_lines) ), inflector.cond_plural(dataset.metadata.data_lines, self.line_class) )
else:
# Number of lines is not known ( this should not happen ), and auto-detect is
# needed to set metadata
# This can happen when the file is larger than max_optional_metadata_filesize.
if int(dataset.get_size()) <= 1048576:
# Small dataset, recount all lines and reset peek afterward.
lc = self.count_data_lines(dataset)
dataset.metadata.data_lines = lc
dataset.blurb = "%s %s" % ( util.commaify( str(lc) ), inflector.cond_plural(lc, self.line_class) )
else:
est_lines = self.estimate_file_lines(dataset)
dataset.blurb = "~%s %s" % ( util.commaify(util.roundify(str(est_lines))), inflector.cond_plural(est_lines, self.line_class) )
else:
dataset.blurb = "%s %s" % ( util.commaify( str(line_count) ), inflector.cond_plural(line_count, self.line_class) )
else:
dataset.peek = 'file does not exist'
dataset.blurb = 'file purged from disk'
def split( cls, input_datasets, subdir_generator_function, split_params):
"""
Split the input files by line.
"""
if split_params is None:
return
if len(input_datasets) > 1:
raise Exception("Text file splitting does not support multiple files")
input_files = [ds.file_name for ds in input_datasets]
lines_per_file = None
chunk_size = None
if split_params['split_mode'] == 'number_of_parts':
lines_per_file = []
# Computing the length is expensive!
def _file_len(fname):
i = 0
f = open(fname)
for i, _ in enumerate(f):
pass
f.close()
return i + 1
length = _file_len(input_files[0])
parts = int(split_params['split_size'])
if length < parts:
parts = length
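            # Spread lines as evenly as possible; the first `remainder` parts
            # get one extra line each.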
len_each, remainder = divmod(length, parts)
while length > 0:
chunk = len_each
if remainder > 0:
chunk += 1
lines_per_file.append(chunk)
remainder -= 1
length -= chunk
elif split_params['split_mode'] == 'to_size':
chunk_size = int(split_params['split_size'])
else:
raise Exception('Unsupported split mode %s' % split_params['split_mode'])
f = open(input_files[0], 'r')
try:
chunk_idx = 0
file_done = False
part_file = None
while not file_done:
if lines_per_file is None:
this_chunk_size = chunk_size
elif chunk_idx < len(lines_per_file):
this_chunk_size = lines_per_file[chunk_idx]
chunk_idx += 1
lines_remaining = this_chunk_size
part_file = None
while lines_remaining > 0:
a_line = f.readline()
if a_line == '':
file_done = True
break
if part_file is None:
part_dir = subdir_generator_function()
part_path = os.path.join(part_dir, os.path.basename(input_files[0]))
part_file = open(part_path, 'w')
part_file.write(a_line)
lines_remaining -= 1
if part_file is not None:
part_file.close()
except Exception as e:
log.error('Unable to split files: %s' % str(e))
f.close()
if part_file is not None:
part_file.close()
raise
f.close()
split = classmethod(split)
# ------------- Dataproviders
@dataproviders.decorators.dataprovider_factory( 'line', dataproviders.line.FilteredLineDataProvider.settings )
def line_dataprovider( self, dataset, **settings ):
"""
Returns an iterator over the dataset's lines (that have been `strip`ed)
optionally excluding blank lines and lines that start with a comment character.
"""
dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
return dataproviders.line.FilteredLineDataProvider( dataset_source, **settings )
@dataproviders.decorators.dataprovider_factory( 'regex-line', dataproviders.line.RegexLineDataProvider.settings )
def regex_line_dataprovider( self, dataset, **settings ):
"""
Returns an iterator over the dataset's lines
optionally including/excluding lines that match one or more regex filters.
"""
dataset_source = dataproviders.dataset.DatasetDataProvider( dataset )
return dataproviders.line.RegexLineDataProvider( dataset_source, **settings )
class GenericAsn1( Text ):
"""Class for generic ASN.1 text format"""
file_ext = 'asn1'
class LineCount( Text ):
"""
Dataset contains a single line with a single integer that denotes the
line count for a related dataset. Used for custom builds.
"""
pass
class Newick( Text ):
"""New Hampshire/Newick Format"""
edam_format = "format_1910"
file_ext = "nhx"
def __init__(self, **kwd):
"""Initialize foobar datatype"""
Text.__init__( self, **kwd )
def init_meta( self, dataset, copy_from=None ):
Text.init_meta( self, dataset, copy_from=copy_from )
def sniff( self, filename ):
""" Returning false as the newick format is too general and cannot be sniffed."""
return False
def get_visualizations( self, dataset ):
"""
Returns a list of visualizations for datatype.
"""
return [ 'phyloviz' ]
class Nexus( Text ):
"""Nexus format as used By Paup, Mr Bayes, etc"""
edam_format = "format_1912"
file_ext = "nex"
def __init__(self, **kwd):
"""Initialize foobar datatype"""
Text.__init__( self, **kwd )
def init_meta( self, dataset, copy_from=None ):
Text.init_meta( self, dataset, copy_from=copy_from )
def sniff( self, filename ):
"""All Nexus Files Simply puts a '#NEXUS' in its first line"""
f = open( filename, "r" )
firstline = f.readline().upper()
f.close()
if "#NEXUS" in firstline:
return True
else:
return False
def get_visualizations( self, dataset ):
"""
Returns a list of visualizations for datatype.
"""
return [ 'phyloviz' ]
# ------------- Utility methods --------------
# nice_size used to be here, but to resolve cyclical dependencies it's been
# moved to galaxy.util. It belongs there anyway since it's used outside
# datatypes.
nice_size = util.nice_size
def get_test_fname( fname ):
"""Returns test data filename"""
path = os.path.dirname(__file__)
full_path = os.path.join( path, 'test', fname )
return full_path
def get_file_peek( file_name, is_multi_byte=False, WIDTH=256, LINE_COUNT=5, skipchars=None, line_wrap=True ):
"""
Returns the first LINE_COUNT lines wrapped to WIDTH
## >>> fname = get_test_fname('4.bed')
## >>> get_file_peek(fname)
## 'chr22 30128507 31828507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +\n'
"""
# Set size for file.readline() to a negative number to force it to
# read until either a newline or EOF. Needed for datasets with very
# long lines.
if WIDTH == 'unlimited':
WIDTH = -1
if skipchars is None:
skipchars = []
lines = []
count = 0
file_type = None
data_checked = False
temp = open( file_name, "U" )
last_line = ''
while count <= LINE_COUNT:
line = last_line + temp.readline( WIDTH - len( last_line ) )
if line and not is_multi_byte and not data_checked:
# See if we have a compressed or binary file
if line[0:2] == util.gzip_magic:
file_type = 'gzipped'
break
else:
for char in line:
if ord( char ) > 128:
file_type = 'binary'
break
data_checked = True
if file_type in [ 'gzipped', 'binary' ]:
break
if not line_wrap:
if '\n' in line:
i = line.index( '\n' )
last_line = line[i:]
line = line[:i]
else:
last_line = ''
while True:
i = temp.read(1)
if not i or i == '\n':
break
skip_line = False
for skipchar in skipchars:
if line.startswith( skipchar ):
skip_line = True
break
if not skip_line:
lines.append( line )
count += 1
temp.close()
if file_type in [ 'gzipped', 'binary' ]:
text = "%s file" % file_type
else:
try:
text = util.unicodify( '\n'.join( lines ) )
except UnicodeDecodeError:
text = "binary/unknown file"
return text | messagetype = "error"
return error, msg, messagetype |
main.py | import os.path
import tensorflow as tf
import helper
import ImageProcessor
import warnings
from distutils.version import LooseVersion
import project_tests as tests
import scipy.misc
from glob import glob
from moviepy.editor import VideoFileClip
import time
import timeit
# Check TensorFlow Version
assert LooseVersion(tf.__version__) >= LooseVersion(
'1.0'), 'Please use TensorFlow version 1.0 or newer. You are using {}'.format(tf.__version__)
print('TensorFlow Version: {}'.format(tf.__version__))
# Check for a GPU
if not tf.test.gpu_device_name():
warnings.warn(
'No GPU found. Please use a GPU to train your neural network.')
else:
print('Default GPU Device: {}'.format(tf.test.gpu_device_name()))
def load_vgg(sess, vgg_path):
"""
Load Pretrained VGG Model into TensorFlow.
:param sess: TensorFlow Session
:param vgg_path: Path to vgg folder, containing "variables/" and "saved_model.pb"
:return: Tuple of Tensors from VGG model (image_input, keep_prob, layer3_out, layer4_out, layer7_out)
"""
# Use tf.saved_model.loader.load to load the model and weights
vgg_tag = 'vgg16'
vgg_input_tensor_name = 'image_input:0'
vgg_keep_prob_tensor_name = 'keep_prob:0'
vgg_layer3_out_tensor_name = 'layer3_out:0'
vgg_layer4_out_tensor_name = 'layer4_out:0'
vgg_layer7_out_tensor_name = 'layer7_out:0'
tf.saved_model.loader.load(sess, [vgg_tag], vgg_path)
graph = tf.get_default_graph()
image_input = graph.get_tensor_by_name(vgg_input_tensor_name)
keep_prob = graph.get_tensor_by_name(vgg_keep_prob_tensor_name)
layer3_out = graph.get_tensor_by_name(vgg_layer3_out_tensor_name)
layer4_out = graph.get_tensor_by_name(vgg_layer4_out_tensor_name)
layer7_out = graph.get_tensor_by_name(vgg_layer7_out_tensor_name)
return image_input, keep_prob, layer3_out, layer4_out, layer7_out
tests.test_load_vgg(load_vgg, tf)
def layers(vgg_layer3_out, vgg_layer4_out, vgg_layer7_out, num_classes):
"""
Create the layers for a fully convolutional network. Build skip-layers using the vgg layers.
:param vgg_layer3_out: TF Tensor for VGG Layer 3 tensor
:param vgg_layer4_out: TF Tensor for VGG Layer 4 tensor
:param vgg_layer7_out: TF Tensor for VGG Layer 7 tensor
:param num_classes: Number of classes to classify
:return: The Tensor for the last layer of tensor
"""
# Outputs of pooling layers 3 and 4 are scaled before they are fed into
# the 1x1 convolutions.
vgg_layer3_out = tf.multiply(vgg_layer3_out, 0.0001)
vgg_layer4_out = tf.multiply(vgg_layer4_out, 0.01)
regularizer = tf.contrib.layers.l2_regularizer(1e-3)
conv_1x1_l3 = tf.layers.conv2d(vgg_layer3_out, num_classes, 1, padding='same',
kernel_regularizer=regularizer)
conv_1x1_l4 = tf.layers.conv2d(vgg_layer4_out, num_classes, 1, padding='same',
kernel_regularizer=regularizer)
conv_1x1_l7 = tf.layers.conv2d(vgg_layer7_out, num_classes, 1, padding='same',
kernel_regularizer=regularizer)
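    # FCN-8s style decoder: upsample the 1x1-convolved layer 7 output by 2x and
    # fuse it with layer 4, upsample by 2x again and fuse with layer 3, then
    # upsample 8x back to the input resolution.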
tensor = tf.layers.conv2d_transpose(
conv_1x1_l7, num_classes, 4, strides=(2, 2), padding='same', kernel_regularizer=regularizer)
tensor = tf.add(tensor, conv_1x1_l4)
tensor = tf.layers.conv2d_transpose(
tensor, num_classes, 4, strides=(2, 2), padding='same', kernel_regularizer=regularizer)
tensor = tf.add(tensor, conv_1x1_l3)
tensor = tf.layers.conv2d_transpose(
tensor, num_classes, 16, strides=(8, 8), padding='same', kernel_regularizer=regularizer)
return tensor
tests.test_layers(layers)
def optimize(nn_last_layer, correct_label, learning_rate, num_classes):
"""
Build the TensorFLow loss and optimizer operations.
:param nn_last_layer: TF Tensor of the last layer in the neural network
:param correct_label: TF Placeholder for the correct label image
:param learning_rate: TF Placeholder for the learning rate
:param num_classes: Number of classes to classify
:return: Tuple of (logits, train_op, cross_entropy_loss)
"""
logits = tf.reshape(nn_last_layer, (-1, num_classes))
labels = tf.reshape(correct_label, (-1, num_classes))
cross_entropy_loss = tf.reduce_mean(
tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=labels))
'''
When adding l2-regularization, setting a regularizer in the arguments of
the tf.layers is not enough. Regularization loss terms must be manually
    added to your loss function. Otherwise regularization is not applied.
'''
regularization_losses = tf.get_collection(
tf.GraphKeys.REGULARIZATION_LOSSES)
cross_entropy_loss = tf.add(cross_entropy_loss, sum(regularization_losses))
optimizer = tf.train.AdamOptimizer(learning_rate)
train_op = optimizer.minimize(cross_entropy_loss)
return logits, train_op, cross_entropy_loss
tests.test_optimize(optimize)
def train_nn(sess, step, batch_size, get_batches_fn, train_op, cross_entropy_loss, input_image,
correct_label, keep_prob, learning_rate, saver=None):
"""
Train neural network and print out the loss during training.
:param sess: TF Session
:param batch_size: Batch size
:param get_batches_fn: Function to get batches of training data. Call using get_batches_fn(batch_size)
:param train_op: TF Operation to train the neural network
:param cross_entropy_loss: TF Tensor for the amount of loss
:param input_image: TF Placeholder for input images
:param correct_label: TF Placeholder for label images
:param keep_prob: TF Placeholder for dropout keep probability
:param learning_rate: TF Placeholder for learning rate
"""
for image, label in (get_batches_fn(batch_size)):
_, loss = sess.run(
[train_op, cross_entropy_loss], feed_dict={input_image: image, correct_label: label,
keep_prob: 1.0, learning_rate: 1e-4})
print('Epoch: {} loss: {:.3f}'.format(step + 1, loss))
if saver:
saver.save(sess, "./ckpts/model.ckpt", global_step=step)
return loss
#tests.test_train_nn(train_nn)
def run():
batches = 13
epochs = 80
restore_model = True
training = True
compute_iou = True
save_inference_samples = True
    do_external_tests = False
save_graph = True
image_shape = (160, 576)
data_dir = './data'
runs_dir = './runs'
# Change following to switch datasets
dataset = helper.KittiDataset(data_dir, image_shape)
num_classes = dataset.get_num_classes()
tests.test_for_kitti_dataset(data_dir)
# Download pretrained vgg model
helper.maybe_download_pretrained_vgg(data_dir)
with tf.Session() as sess:
correct_label = tf.placeholder(
tf.int32, [None, None, None, num_classes])
learning_rate = tf.placeholder(tf.float32)
# Path to vgg model
vgg_path = os.path.join(data_dir, 'vgg')
# Create function to get batches
get_batches_fn = dataset.gen_batch_function()
input_image, keep_prob, layer3_out, layer4_out, layer7_out = load_vgg(
sess, vgg_path)
tensor = layers(layer3_out, layer4_out, layer7_out, num_classes)
logits, optimizer, cross_entropy_loss = optimize(tensor, correct_label, learning_rate,
num_classes)
if compute_iou:
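            # Reduce per-pixel softmax scores and one-hot labels to class ids
            # for the streaming mean-IOU metric.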
predictions = tf.argmax(tf.nn.softmax(tensor), axis=-1)
gt = tf.argmax(correct_label, axis=-1)
mean_iou, iou_update_op = tf.metrics.mean_iou(
gt, predictions, num_classes)
sess.run(tf.global_variables_initializer())
sess.run(tf.local_variables_initializer())
saver = tf.train.Saver(max_to_keep=2, keep_checkpoint_every_n_hours=1)
restore_path = tf.train.latest_checkpoint('./ckpts/') | if restore_path and restore_model:
print("Resotring model from: %s " % restore_path)
saver.restore(sess, restore_path)
for step in range(epochs):
if training:
print("Training...")
start_time = timeit.default_timer()
loss = train_nn(sess, step, batches, get_batches_fn, optimizer, cross_entropy_loss, input_image,
correct_label, keep_prob, learning_rate, saver)
elapsed = timeit.default_timer() - start_time
print('Epoch: {} loss: {:.3f} time: {:.3f}'.format(step + 1, loss, elapsed))
if save_inference_samples:
print("Saving inference samples...")
dataset.save_inference_samples(
runs_dir, sess, logits, keep_prob, input_image)
# compute mean_iou on training images
if compute_iou:
print("Computing IOU...")
mean_ious = []
for image, label in (get_batches_fn(batches)):
sess.run([predictions, iou_update_op], feed_dict={
input_image: image, correct_label: label, keep_prob: 1})
# Avoiding headaches
# http://ronny.rest/blog/post_2017_09_11_tf_metrics/
mean_ious.append(sess.run(mean_iou))
print("Mean IOU: {:.3f}".format(sum(mean_ious) / len(mean_ious)))
if do_external_tests:
print("Processing test images...")
processor = ImageProcessor.ImageProcessor(
image_shape, sess, logits, keep_prob, input_image)
for idx, image_file in enumerate(glob("./test_images/*.jpg")):
image = scipy.misc.imread(image_file)
image = processor.process_image(image)
scipy.misc.imsave(os.path.join(
"output_images", str(idx) + ".png"), image)
print("Processing test video...")
videoname = 'test_video'
output_file = videoname + '_output.mp4'
input_file = videoname + '.mp4'
clip = VideoFileClip(input_file)
video_clip = clip.fl_image(processor.process_image)
video_clip.write_videofile(output_file, audio=False)
if save_graph:
print("Saving graph...")
# Save GraphDef
tf.train.write_graph(sess.graph_def,'.','graph.pb', as_text=False)
print("Done.")
if __name__ == '__main__':
run() | |
constants.ts | /* tslint:disable */
/* eslint-disable */
/*
* Autogenerated by @creditkarma/thrift-typescript v{{VERSION}}
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
*/ | export const SHARED_INT: number = 45; |
|
def 20.py | import random | n3 = str(input('name 3='))
n4 = str(input('name 4='))
lista = [n1, n2, n3, n4]
random.shuffle(lista)
print('new order{}'.format(lista)) | n1 = str(input('name 1='))
n2 = str(input('name 2=')) |
mod.rs | //! **Canonicalization** is the key to constructing a query in the
//! middle of type inference. Ordinarily, it is not possible to store
//! types from type inference in query keys, because they contain
//! references to inference variables whose lifetimes are too short
//! and so forth. Canonicalizing a value T1 using `canonicalize_query`
//! produces two things:
//!
//! - a value T2 where each unbound inference variable has been
//! replaced with a **canonical variable**;
//! - a map M (of type `CanonicalVarValues`) from those canonical
//! variables back to the original.
//!
//! We can then do queries using T2. These will give back constraints
//! on the canonical variables which can be translated, using the map
//! M, into constraints in our source context. This process of
//! translating the results back is done by the
//! `instantiate_query_result` method.
//!
//! For a more detailed look at what is happening here, check
//! out the [chapter in the rustc dev guide][c].
//!
//! [c]: https://rust-lang.github.io/chalk/book/canonical_queries/canonicalization.html
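// Illustrative sketch (informal notation, not taken from the sources): canonicalizing
// a value such as the type `Vec<?3>`, where `?3` is an unbound inference variable,
// conceptually produces
//   - T2 = `Vec<C0>`, with `C0` the first canonical variable, and
//   - M  = { C0 -> ?3 } (a `CanonicalVarValues`).
// A query answered in terms of `C0` is then translated back onto `?3` through M,
// which is what `instantiate_query_result` does.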
use crate::infer::{ConstVariableOrigin, ConstVariableOriginKind};
use crate::infer::{InferCtxt, RegionVariableOrigin, TypeVariableOrigin, TypeVariableOriginKind};
use rustc_index::vec::IndexVec;
use rustc_middle::ty::fold::TypeFoldable;
use rustc_middle::ty::subst::GenericArg;
use rustc_middle::ty::{self, BoundVar, List};
use rustc_span::source_map::Span;
pub use rustc_middle::infer::canonical::*;
use substitute::CanonicalExt;
mod canonicalizer;
pub mod query_response;
mod substitute;
impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
/// Creates a substitution S for the canonical value with fresh
/// inference variables and applies it to the canonical value.
/// Returns both the instantiated result *and* the substitution S.
///
/// This is only meant to be invoked as part of constructing an
/// inference context at the start of a query (see
/// `InferCtxtBuilder::enter_with_canonical`). It basically
/// brings the canonical value "into scope" within your new infcx.
///
/// At the end of processing, the substitution S (once
/// canonicalized) then represents the values that you computed
/// for each of the canonical inputs to your query.
pub fn instantiate_canonical_with_fresh_inference_vars<T>(
&self,
span: Span,
canonical: &Canonical<'tcx, T>,
) -> (T, CanonicalVarValues<'tcx>)
where
T: TypeFoldable<'tcx>,
{
// For each universe that is referred to in the incoming
// query, create a universe in our local inference context. In
// practice, as of this writing, all queries have no universes
// in them, so this code has no effect, but it is looking
// forward to the day when we *do* want to carry universes
// through into queries.
let universes: IndexVec<ty::UniverseIndex, _> = std::iter::once(ty::UniverseIndex::ROOT)
.chain((0..canonical.max_universe.as_u32()).map(|_| self.create_next_universe()))
.collect();
let canonical_inference_vars =
self.instantiate_canonical_vars(span, canonical.variables, |ui| universes[ui]);
let result = canonical.substitute(self.tcx, &canonical_inference_vars);
(result, canonical_inference_vars)
}
/// Given the "infos" about the canonical variables from some
/// canonical, creates fresh variables with the same
/// characteristics (see `instantiate_canonical_var` for
/// details). You can then use `substitute` to instantiate the
/// canonical variable with these inference variables.
fn | (
&self,
span: Span,
variables: &List<CanonicalVarInfo<'tcx>>,
universe_map: impl Fn(ty::UniverseIndex) -> ty::UniverseIndex,
) -> CanonicalVarValues<'tcx> {
let var_values: IndexVec<BoundVar, GenericArg<'tcx>> = variables
.iter()
.map(|info| self.instantiate_canonical_var(span, info, &universe_map))
.collect();
CanonicalVarValues { var_values }
}
/// Given the "info" about a canonical variable, creates a fresh
/// variable for it. If this is an existentially quantified
/// variable, then you'll get a new inference variable; if it is a
/// universally quantified variable, you get a placeholder.
fn instantiate_canonical_var(
&self,
span: Span,
cv_info: CanonicalVarInfo<'tcx>,
universe_map: impl Fn(ty::UniverseIndex) -> ty::UniverseIndex,
) -> GenericArg<'tcx> {
match cv_info.kind {
CanonicalVarKind::Ty(ty_kind) => {
let ty = match ty_kind {
CanonicalTyVarKind::General(ui) => self.next_ty_var_in_universe(
TypeVariableOrigin { kind: TypeVariableOriginKind::MiscVariable, span },
universe_map(ui),
),
CanonicalTyVarKind::Int => self.next_int_var(),
CanonicalTyVarKind::Float => self.next_float_var(),
};
ty.into()
}
CanonicalVarKind::PlaceholderTy(ty::PlaceholderType { universe, name }) => {
let universe_mapped = universe_map(universe);
let placeholder_mapped = ty::PlaceholderType { universe: universe_mapped, name };
self.tcx.mk_ty(ty::Placeholder(placeholder_mapped)).into()
}
CanonicalVarKind::Region(ui) => self
.next_region_var_in_universe(
RegionVariableOrigin::MiscVariable(span),
universe_map(ui),
)
.into(),
CanonicalVarKind::PlaceholderRegion(ty::PlaceholderRegion { universe, name }) => {
let universe_mapped = universe_map(universe);
let placeholder_mapped = ty::PlaceholderRegion { universe: universe_mapped, name };
self.tcx.mk_region(ty::RePlaceholder(placeholder_mapped)).into()
}
CanonicalVarKind::Const(ui, ty) => self
.next_const_var_in_universe(
ty,
ConstVariableOrigin { kind: ConstVariableOriginKind::MiscVariable, span },
universe_map(ui),
)
.into(),
CanonicalVarKind::PlaceholderConst(ty::PlaceholderConst { universe, name }) => {
let universe_mapped = universe_map(universe);
let placeholder_mapped = ty::PlaceholderConst { universe: universe_mapped, name };
self.tcx
.mk_const(ty::ConstS {
val: ty::ConstKind::Placeholder(placeholder_mapped),
ty: name.ty,
})
.into()
}
}
}
}
| instantiate_canonical_vars |
target.rs | #![allow(dead_code)]
#![allow(unused_imports)]
//! Target-specific definitions
use vcell::VolatileCell;
use core::marker::PhantomData;
#[cfg(feature = "cortex-m")]
pub use cortex_m::interrupt;
#[cfg(feature = "riscv")]
pub use riscv::interrupt;
use crate::ral::{otg_global, otg_device, otg_pwrclk, otg_fifo};
use crate::UsbPeripheral;
pub fn fifo_write(channel: impl Into<usize>, mut buf: &[u8]) {
let fifo = otg_fifo::instance(channel.into());
while buf.len() >= 4 {
let mut u32_bytes = [0u8; 4];
u32_bytes.copy_from_slice(&buf[..4]);
buf = &buf[4..];
fifo.write(u32::from_ne_bytes(u32_bytes));
}
if buf.len() > 0 {
let mut u32_bytes = [0u8; 4];
u32_bytes[..buf.len()].copy_from_slice(buf);
fifo.write(u32::from_ne_bytes(u32_bytes));
}
}
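// Worked example (illustrative only): fifo_write(0usize, &[1, 2, 3, 4, 5, 6]) pushes two
// 32-bit words into FIFO 0: u32::from_ne_bytes([1, 2, 3, 4]) followed by
// u32::from_ne_bytes([5, 6, 0, 0]); the trailing partial word is zero-padded.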
pub fn | (mut buf: &mut [u8]) {
let fifo = otg_fifo::instance(0);
while buf.len() >= 4 {
let word = fifo.read();
let bytes = word.to_ne_bytes();
buf[..4].copy_from_slice(&bytes);
buf = &mut buf[4..];
}
if buf.len() > 0 {
let word = fifo.read();
let bytes = word.to_ne_bytes();
buf.copy_from_slice(&bytes[..buf.len()]);
}
}
pub fn fifo_read_into(buf: &[VolatileCell<u32>]) {
let fifo = otg_fifo::instance(0);
for p in buf {
let word = fifo.read();
p.set(word);
}
}
/// Wrapper around device-specific peripheral that provides unified register interface
pub struct UsbRegisters<USB> {
pub global: otg_global::Instance,
pub device: otg_device::Instance,
pub pwrclk: otg_pwrclk::Instance,
_marker: PhantomData<USB>,
}
unsafe impl<USB> Send for UsbRegisters<USB> {}
impl<USB: UsbPeripheral> UsbRegisters<USB> {
pub fn new() -> Self {
Self {
global: unsafe { otg_global::OTG_GLOBAL::steal() },
device: unsafe { otg_device::OTG_DEVICE::steal() },
pwrclk: unsafe { otg_pwrclk::OTG_PWRCLK::steal() },
_marker: PhantomData,
}
}
}
| fifo_read |
proxy.go | package proxy
import (
"context"
"crypto/tls"
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"os"
"strconv"
"strings"
"time"
log "github.com/sirupsen/logrus"
"github.com/stripe/stripe-cli/pkg/ansi"
"github.com/stripe/stripe-cli/pkg/config"
"github.com/stripe/stripe-cli/pkg/requests"
"github.com/stripe/stripe-cli/pkg/stripe"
"github.com/stripe/stripe-cli/pkg/stripeauth"
"github.com/stripe/stripe-cli/pkg/websocket"
)
//
// Public types
//
// EndpointRoute describes a local endpoint's routing configuration.
type EndpointRoute struct {
// URL is the endpoint's URL.
URL string
// Headers to forward to endpoints
ForwardHeaders []string
// Connect indicates whether the endpoint should receive normal (when false) or Connect (when true) events.
Connect bool
// EventTypes is the list of event types that should be sent to the endpoint.
EventTypes []string
}
// EndpointResponse describes the response to a Stripe event from an endpoint
type EndpointResponse struct {
Event *StripeEvent
Resp *http.Response
RespBody string
}
// FailedToReadResponseError describes a failure to read the response from an endpoint
type FailedToReadResponseError struct {
Err error
}
func (f FailedToReadResponseError) Error() string {
return f.Err.Error()
}
// Config provides the configuration of a Proxy
type Config struct {
// DeviceName is the name of the device sent to Stripe to help identify the device
DeviceName string
// Key is the API key used to authenticate with Stripe
Key string
// URL to which requests are sent
APIBaseURL string
// URL to which events are forwarded to
ForwardURL string
// Headers to inject when forwarding events
ForwardHeaders []string
// URL to which Connect events are forwarded to
ForwardConnectURL string
// Headers to inject when forwarding Connect events
ForwardConnectHeaders []string
// UseConfiguredWebhooks loads webhooks config from user's account
UseConfiguredWebhooks bool
// EndpointsRoutes is a mapping of local webhook endpoint urls to the events they consume
EndpointRoutes []EndpointRoute
// List of events to listen and proxy
Events []string
// WebSocketFeature is the feature specified for the websocket connection
WebSocketFeature string
// Indicates whether to print full JSON objects to stdout
PrintJSON bool
// Specifies the format to print to stdout.
Format string
// Indicates whether to filter events formatted with the default or latest API version
UseLatestAPIVersion bool
// Indicates whether to skip certificate verification when forwarding webhooks to HTTPS endpoints
SkipVerify bool
// The logger used to log messages to stdin/err
Log *log.Logger
// Force use of unencrypted ws:// protocol instead of wss://
NoWSS bool
// OutCh is the channel to send logs and statuses to for processing in other packages
OutCh chan websocket.IElement
}
// A Proxy opens a websocket connection with Stripe, listens for incoming
// webhook events, forwards them to the local endpoint and sends the response
// back to Stripe.
type Proxy struct {
cfg *Config
endpointClients []*EndpointClient
stripeAuthClient *stripeauth.Client
webSocketClient *websocket.Client
// Events is the supported event types for the command
events map[string]bool
}
const maxConnectAttempts = 3
// Run sets the websocket connection and starts the Goroutines to forward
// incoming events to the local endpoint.
func (p *Proxy) Run(ctx context.Context) error {
defer close(p.cfg.OutCh)
p.cfg.OutCh <- websocket.StateElement{
State: websocket.Loading,
}
var nAttempts int = 0
for nAttempts < maxConnectAttempts {
session, err := p.createSession(ctx)
if err != nil {
p.cfg.OutCh <- websocket.ErrorElement{
Error: fmt.Errorf("Error while authenticating with Stripe: %v", err),
}
return err
}
p.webSocketClient = websocket.NewClient(
session.WebSocketURL,
session.WebSocketID,
session.WebSocketAuthorizedFeature,
&websocket.Config{
Log: p.cfg.Log,
NoWSS: p.cfg.NoWSS,
ReconnectInterval: time.Duration(session.ReconnectDelay) * time.Second,
EventHandler: websocket.EventHandlerFunc(p.processWebhookEvent),
},
)
go func() {
<-p.webSocketClient.Connected()
nAttempts = 0
p.cfg.OutCh <- websocket.StateElement{
State: websocket.Ready,
Data: []string{session.Secret},
}
}()
go p.webSocketClient.Run(ctx)
nAttempts++
select {
case <-ctx.Done():
p.cfg.OutCh <- &websocket.StateElement{
State: websocket.Done,
}
return nil
case <-p.webSocketClient.NotifyExpired:
if nAttempts < maxConnectAttempts {
p.cfg.OutCh <- &websocket.StateElement{
State: websocket.Reconnecting,
}
} else {
err := fmt.Errorf("Session expired. Terminating after %d failed attempts to reauthorize", nAttempts)
p.cfg.OutCh <- websocket.ErrorElement{
Error: err,
}
return err
}
}
}
if p.webSocketClient != nil {
p.webSocketClient.Stop()
}
log.WithFields(log.Fields{
"prefix": "proxy.Proxy.Run",
}).Debug("Bye!")
return nil
}
// GetSessionSecret creates a session and returns the webhook signing secret.
func GetSessionSecret(deviceName, key, baseURL string) (string, error) {
p, err := Init(&Config{
DeviceName: deviceName,
Key: key,
APIBaseURL: baseURL,
EndpointRoutes: make([]EndpointRoute, 0),
WebSocketFeature: "webhooks",
})
if err != nil {
log.WithFields(log.Fields{
"prefix": "proxy.Proxy.GetSessionSecret",
}).Debug(err)
return "", err
}
session, err := p.createSession(context.Background())
if err != nil {
log.WithFields(log.Fields{
"prefix": "proxy.Proxy.GetSessionSecret",
}).Debug(fmt.Sprintf("Error while authenticating with Stripe: %v", err))
return "", err
}
return session.Secret, nil
}
func (p *Proxy) createSession(ctx context.Context) (*stripeauth.StripeCLISession, error) {
var session *stripeauth.StripeCLISession
var err error
exitCh := make(chan struct{})
go func() {
// Try to authorize at least 5 times before failing. Sometimes we have random
// transient errors that we just need to retry for.
for i := 0; i <= 5; i++ {
session, err = p.stripeAuthClient.Authorize(ctx, p.cfg.DeviceName, p.cfg.WebSocketFeature, nil)
if err == nil {
exitCh <- struct{}{}
return
}
select {
case <-ctx.Done():
exitCh <- struct{}{}
return
case <-time.After(1 * time.Second):
}
}
exitCh <- struct{}{}
}()
<-exitCh
return session, err
}
func (p *Proxy) filterWebhookEvent(msg *websocket.WebhookEvent) bool {
if msg.Endpoint.APIVersion != nil && !p.cfg.UseLatestAPIVersion {
p.cfg.Log.WithFields(log.Fields{
"prefix": "proxy.Proxy.filterWebhookEvent",
"api_version": getAPIVersionString(msg.Endpoint.APIVersion),
}).Debugf("Received event with non-default API version, ignoring")
return true
}
if msg.Endpoint.APIVersion == nil && p.cfg.UseLatestAPIVersion {
p.cfg.Log.WithFields(log.Fields{
"prefix": "proxy.Proxy.filterWebhookEvent",
}).Debugf("Received event with default API version, ignoring")
return true
}
return false
}
// This function outputs the event payload in the format specified.
// Currently only supports JSON.
func (p *Proxy) formatOutput(format string, eventPayload string) string {
var event map[string]interface{}
err := json.Unmarshal([]byte(eventPayload), &event)
if err != nil {
p.cfg.Log.Debug("Received malformed event from Stripe, ignoring")
return fmt.Sprint(err)
}
switch strings.ToUpper(format) {
// The distinction between this and PrintJSON is that this output is stripped of all pretty formatting.
case outputFormatJSON:
outputJSON, _ := json.Marshal(event)
return fmt.Sprintln(ansi.ColorizeJSON(string(outputJSON), false, os.Stdout))
default:
return fmt.Sprintf("Unrecognized output format %s\n" + format)
}
}
func (p *Proxy) processWebhookEvent(msg websocket.IncomingMessage) {
if msg.WebhookEvent == nil {
p.cfg.Log.Debug("WebSocket specified for Webhooks received non-webhook event")
return
}
webhookEvent := msg.WebhookEvent
p.cfg.Log.WithFields(log.Fields{
"prefix": "proxy.Proxy.processWebhookEvent",
"webhook_id": webhookEvent.WebhookID,
"webhook_converesation_id": webhookEvent.WebhookConversationID,
}).Debugf("Processing webhook event")
var evt StripeEvent
err := json.Unmarshal([]byte(webhookEvent.EventPayload), &evt)
if err != nil {
p.cfg.Log.Debug("Received malformed event from Stripe, ignoring")
return
}
p.cfg.Log.WithFields(log.Fields{
"prefix": "proxy.Proxy.processWebhookEvent",
"webhook_id": webhookEvent.WebhookID,
"webhook_conversation_id": webhookEvent.WebhookConversationID,
"event_id": evt.ID,
"event_type": evt.Type,
"api_version": getAPIVersionString(msg.Endpoint.APIVersion),
}).Trace("Webhook event trace")
if p.filterWebhookEvent(webhookEvent) {
return
}
evtCtx := eventContext{
webhookID: webhookEvent.WebhookID,
webhookConversationID: webhookEvent.WebhookConversationID,
event: &evt,
}
if p.events["*"] || p.events[evt.Type] {
p.cfg.OutCh <- websocket.DataElement{
Data: evt,
Marshaled: p.formatOutput(outputFormatJSON, webhookEvent.EventPayload),
}
for _, endpoint := range p.endpointClients {
if endpoint.SupportsEventType(evt.IsConnect(), evt.Type) {
// TODO: handle errors returned by endpointClients
go endpoint.Post(
evtCtx,
webhookEvent.EventPayload,
webhookEvent.HTTPHeaders,
)
}
}
}
}
func (p *Proxy) processEndpointResponse(evtCtx eventContext, forwardURL string, resp *http.Response) {
buf, err := ioutil.ReadAll(resp.Body)
if err != nil {
p.cfg.OutCh <- websocket.ErrorElement{
Error: FailedToReadResponseError{Err: err},
}
return
}
body := truncate(string(buf), maxBodySize, true)
p.cfg.OutCh <- websocket.DataElement{
Data: EndpointResponse{
Event: evtCtx.event,
Resp: resp,
RespBody: body,
},
}
idx := 0
headers := make(map[string]string)
for k, v := range resp.Header {
headers[truncate(k, maxHeaderKeySize, false)] = truncate(v[0], maxHeaderValueSize, true)
idx++
if idx > maxNumHeaders {
break
}
}
if p.webSocketClient != nil {
msg := websocket.NewWebhookResponse(
evtCtx.webhookID,
evtCtx.webhookConversationID,
forwardURL,
resp.StatusCode,
body,
headers,
)
p.webSocketClient.SendMessage(msg)
}
}
//
// Public functions
//
// Init initializes a new Proxy
func Init(cfg *Config) (*Proxy, error) {
if cfg.Log == nil {
cfg.Log = &log.Logger{Out: ioutil.Discard}
}
// validate forward-urls args
if cfg.UseConfiguredWebhooks && len(cfg.ForwardURL) > 0 {
if strings.HasPrefix(cfg.ForwardURL, "/") {
return nil, errors.New("forward_to cannot be a relative path when loading webhook endpoints from the API")
}
if strings.HasPrefix(cfg.ForwardConnectURL, "/") {
return nil, errors.New("forward_connect_to cannot be a relative path when loading webhook endpoints from the API")
}
} else if cfg.UseConfiguredWebhooks && len(cfg.ForwardURL) == 0 {
return nil, errors.New("load_from_webhooks_api requires a location to forward to with forward_to")
}
// if no events are passed, listen for all events
if len(cfg.Events) == 0 {
cfg.Events = []string{"*"}
} else {
for _, event := range cfg.Events {
if _, found := validEvents[event]; !found {
cfg.Log.Infof("Warning: You're attempting to listen for \"%s\", which isn't a valid event\n", event)
}
}
}
// build endpoint routes
var endpointRoutes []EndpointRoute
if cfg.UseConfiguredWebhooks {
// build from user's API config
endpoints := getEndpointsFromAPI(cfg.Key, cfg.APIBaseURL)
if len(endpoints.Data) == 0 {
return nil, errors.New("You have not defined any webhook endpoints on your account. Go to the Stripe Dashboard to add some: https://dashboard.stripe.com/test/webhooks")
}
var err error
endpointRoutes, err = buildEndpointRoutes(endpoints, parseURL(cfg.ForwardURL), parseURL(cfg.ForwardConnectURL), cfg.ForwardHeaders, cfg.ForwardConnectHeaders)
if err != nil {
return nil, err
}
} else {
// build from --forward-to urls
if len(cfg.ForwardConnectURL) == 0 {
cfg.ForwardConnectURL = cfg.ForwardURL
}
if len(cfg.ForwardConnectHeaders) == 0 {
cfg.ForwardConnectHeaders = cfg.ForwardHeaders
}
if len(cfg.ForwardURL) > 0 {
// non-connect endpoints
endpointRoutes = append(endpointRoutes, EndpointRoute{
URL: parseURL(cfg.ForwardURL),
ForwardHeaders: cfg.ForwardHeaders,
Connect: false,
EventTypes: cfg.Events,
})
}
if len(cfg.ForwardConnectURL) > 0 {
// connect endpoints
endpointRoutes = append(endpointRoutes, EndpointRoute{
URL: parseURL(cfg.ForwardConnectURL),
ForwardHeaders: cfg.ForwardConnectHeaders,
Connect: true,
EventTypes: cfg.Events,
})
}
}
p := &Proxy{
cfg: cfg,
stripeAuthClient: stripeauth.NewClient(cfg.Key, &stripeauth.Config{
Log: cfg.Log,
APIBaseURL: cfg.APIBaseURL,
}),
events: convertToMap(cfg.Events),
}
for _, route := range endpointRoutes {
// append to endpointClients
p.endpointClients = append(p.endpointClients, NewEndpointClient(
route.URL,
route.ForwardHeaders,
route.Connect,
route.EventTypes,
&EndpointConfig{
HTTPClient: &http.Client{
CheckRedirect: func(req *http.Request, via []*http.Request) error {
return http.ErrUseLastResponse
},
Timeout: defaultTimeout,
Transport: &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: cfg.SkipVerify},
},
},
Log: p.cfg.Log,
ResponseHandler: EndpointResponseHandlerFunc(p.processEndpointResponse),
OutCh: p.cfg.OutCh,
},
))
}
return p, nil
}
//
// Private types
//
type eventContext struct {
webhookID string
webhookConversationID string
event *StripeEvent
}
//
// Private constants
//
const (
maxBodySize = 5000
maxNumHeaders = 20
maxHeaderKeySize = 50
maxHeaderValueSize = 200
)
const outputFormatJSON = "JSON"
//
// Private functions
//
// truncate will truncate str to be less than or equal to maxByteLength bytes.
// It will respect UTF8 and truncate the string at a code point boundary.
// If ellipsis is true, we'll append "..." to the truncated string if the string
// was in fact truncated, and if there's enough room. Note that the
// full string returned will always be <= maxByteLength bytes long, even with ellipsis.
func truncate(str string, maxByteLength int, ellipsis bool) string {
if len(str) <= maxByteLength {
return str
}
bytes := []byte(str)
if ellipsis && maxByteLength > 3 {
maxByteLength -= 3
} else {
ellipsis = false
}
for maxByteLength > 0 && maxByteLength < len(bytes) && isUTF8ContinuationByte(bytes[maxByteLength]) {
maxByteLength--
}
result := string(bytes[0:maxByteLength])
if ellipsis {
result += "..."
}
return result
}
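// Quick illustration (not from the original source):
//   truncate("abcdefghij", 8, true)  == "abcde..." (5 bytes kept plus a 3-byte ellipsis)
//   truncate("abcdefghij", 8, false) == "abcdefgh"
// Multi-byte UTF-8 sequences are never split: the loop above backs out of
// continuation bytes (0b10xxxxxx) until it reaches a code point boundary.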
func isUTF8ContinuationByte(b byte) bool {
return (b & 0xC0) == 0x80
}
// TODO: move to some helper somewhere
// parseURL parses the potentially incomplete URL provided in the configuration
// and returns a full URL
func parseURL(url string) string {
_, err := strconv.Atoi(url)
if err == nil {
// If the input is just a number, assume it's a port number
url = "localhost:" + url
}
if strings.HasPrefix(url, "/") {
// If the input starts with a /, assume it's a relative path
url = "localhost" + url
}
if !strings.HasPrefix(url, "http://") && !strings.HasPrefix(url, "https://") {
// Add the protocol if it's not already there
url = "http://" + url
}
return url
}
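// A few hypothetical inputs and the URLs they normalize to:
//   parseURL("3000")        -> "http://localhost:3000"
//   parseURL("/my/hooks")   -> "http://localhost/my/hooks"
//   parseURL("example.com") -> "http://example.com"
// Inputs that already carry an http:// or https:// scheme are returned unchanged.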
func getEndpointsFromAPI(secretKey, apiBaseURL string) requests.WebhookEndpointList {
if apiBaseURL == "" {
apiBaseURL = stripe.DefaultAPIBaseURL
}
return requests.WebhookEndpointsList(apiBaseURL, "2019-03-14", secretKey, &config.Profile{})
}
func | (endpoints requests.WebhookEndpointList, forwardURL, forwardConnectURL string, forwardHeaders []string, forwardConnectHeaders []string) ([]EndpointRoute, error) {
endpointRoutes := make([]EndpointRoute, 0)
for _, endpoint := range endpoints.Data {
u, err := url.Parse(endpoint.URL)
// Silently skip over invalid paths
if err == nil {
// Since webhooks in the dashboard may have a more generic url, only extract
// the path. We'll use this with `localhost` or with the `--forward-to` flag
if endpoint.Application == "" {
url, err := buildForwardURL(forwardURL, u)
if err != nil {
return nil, err
}
endpointRoutes = append(endpointRoutes, EndpointRoute{
URL: url,
ForwardHeaders: forwardHeaders,
Connect: false,
EventTypes: endpoint.EnabledEvents,
})
} else {
url, err := buildForwardURL(forwardConnectURL, u)
if err != nil {
return nil, err
}
endpointRoutes = append(endpointRoutes, EndpointRoute{
URL: url,
ForwardHeaders: forwardConnectHeaders,
Connect: true,
EventTypes: endpoint.EnabledEvents,
})
}
}
}
return endpointRoutes, nil
}
func buildForwardURL(forwardURL string, destination *url.URL) (string, error) {
f, err := url.Parse(forwardURL)
if err != nil {
return "", fmt.Errorf("Provided forward url cannot be parsed: %s", forwardURL)
}
return fmt.Sprintf(
"%s://%s%s%s",
f.Scheme,
f.Host,
strings.TrimSuffix(f.Path, "/"), // avoids having a double "//"
destination.Path,
), nil
}
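// For instance (hypothetical values): forwardURL "http://localhost:3000/stripe/" combined
// with a dashboard endpoint whose path is "/webhook" yields
// "http://localhost:3000/stripe/webhook"; the TrimSuffix above prevents "stripe//webhook".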
func getAPIVersionString(str *string) string {
var APIVersion string
if str == nil {
APIVersion = "null"
} else {
APIVersion = *str
}
return APIVersion
}
| buildEndpointRoutes |
make_entry_test.py | from __future__ import absolute_import
from __future__ import unicode_literals
import io
import mock
import os.path
import pytest
import sys
from pypi_practices import five
from pypi_practices.errors import FileValidationError
from pypi_practices.make_entry import make_entry
from testing.util import REMatcher
@pytest.fixture
def fake_entry():
class fake_entry_state(object):
cwd_arg = None
config_arg = None
entry = None
def check_fn(cwd, fix, config):
fake_entry_state.cwd_arg = cwd
fake_entry_state.fix_arg = fix
fake_entry_state.config_arg = config
return 0
fake_entry_state.entry = staticmethod(make_entry(check_fn))
yield fake_entry_state
def test_converts_args_to_text(fake_entry):
# Native str (py2 vs py3)
args = [str('--cwd'), str('path')]
fake_entry.entry(args)
assert type(fake_entry.cwd_arg) is five.text
assert fake_entry.cwd_arg == 'path'
def test_cwd_defaults_to_dot(fake_entry):
fake_entry.entry([])
assert fake_entry.cwd_arg == '.'
def test_fix_calls_fix(fake_entry):
fake_entry.entry(['--fix'])
assert fake_entry.fix_arg is True
def test_ignores_extra_filename_args(fake_entry):
fake_entry.entry(['README.md', 'tox.ini'])
assert fake_entry.cwd_arg == '.'
@pytest.mark.parametrize('args', ([], ['--fix']))
def test_returns_0_for_ok(fake_entry, args):
ret = fake_entry.entry(args)
assert ret == 0
def test_no_args_passed_uses_sys_argv(fake_entry):
with mock.patch.object(sys, 'argv', ['hook-exe', '--cwd', 'foo_cwd']):
fake_entry.entry()
assert fake_entry.cwd_arg == 'foo_cwd'
@pytest.fixture
def print_mock():
with mock.patch.object(five.builtins, 'print') as print_mock:
yield print_mock
def test_ok_prints_nothing(fake_entry, print_mock):
fake_entry.entry([])
assert print_mock.call_count == 0
def test_raises_validation_error(print_mock):
def raising_check(*_):
raise FileValidationError(
'README.md',
'Missing something.'
)
entry = make_entry(raising_check)
ret = entry([])
assert ret == 1
print_mock.assert_called_once_with(
'README.md: Missing something.\n'
'\n'
'Manually edit the file above to fix.'
)
def test_message_contains_line_if_specified(print_mock):
|
def test_auto_fixable_prints_auto_fixable(print_mock):
def raising_check_auto_fixable(*_):
raise FileValidationError(
'README.md',
'Missing something.',
is_auto_fixable=True,
)
entry = make_entry(raising_check_auto_fixable)
ret = entry([])
assert ret == 1
print_mock.assert_called_once_with(
'README.md: Missing something.\n'
'\n'
'To attempt automatic fixing, run with --fix.'
)
def test_passes_config(tmpdir, fake_entry):
config_path = os.path.join(tmpdir.strpath, '.pypi-practices-config.yaml')
with io.open(config_path, 'w') as config_file:
config_file.write('autofix: true')
ret = fake_entry.entry(['--cwd', tmpdir.strpath])
assert ret == 0
assert fake_entry.config_arg == {'autofix': True}
def test_failing_config(tmpdir, fake_entry, print_mock):
config_path = os.path.join(tmpdir.strpath, '.pypi-practices-config.yaml')
with io.open(config_path, 'w') as config_file:
config_file.write('foo: "')
ret = fake_entry.entry(['--cwd', tmpdir.strpath])
assert ret == 1
print_mock.assert_called_once_with(REMatcher(
r'.pypi-practices-config.yaml: Invalid Yaml:\n\n'
r'while scanning a quoted scalar\n'
r' in ".+\.pypi-practices-config.yaml", line 1, column 6\n'
r'found unexpected end of stream\n'
r' in ".+/.pypi-practices-config.yaml", line 1, column 7'
))
| def raising_check_with_line_number(*_):
raise FileValidationError(
'README.md',
'Missing something.',
line=5,
)
entry = make_entry(raising_check_with_line_number)
ret = entry([])
assert ret == 1
print_mock.assert_called_once_with(
'README.md:5: Missing something.\n'
'\n'
'Manually edit the file above to fix.'
) |
cloner.service.ts | import { Injectable } from '@angular/core';
import * as clone from 'clone';
@Injectable({
providedIn: 'root'
})
export class | {
deepClone<T>(value) {
return clone<T>(value);
}
} | ClonerService |
barcode_decoder.py | #!/usr/bin/env python
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import optparse
import os
import sys
import helper_functions
def convert_yuv_to_png_files(yuv_file_name, yuv_frame_width, yuv_frame_height,
output_directory, ffmpeg_dir=None):
"""Converts a YUV video file into PNG frames.
The function uses ffmpeg to convert the YUV file. The output of ffmpeg is in
the form frame_xxxx.png, where xxxx is the frame number, starting from 0001.
Args:
yuv_file_name(string): The name of the YUV file.
yuv_frame_width(int): The width of one YUV frame.
yuv_frame_height(int): The height of one YUV frame.
output_directory(string): The output directory where the PNG frames will be
stored.
ffmpeg_dir(string): The directory containing the ffmpeg executable. If
omitted, the PATH will be searched for it.
Return:
(bool): True if the conversion was OK.
"""
size_string = str(yuv_frame_width) + 'x' + str(yuv_frame_height)
output_files_pattern = os.path.join(output_directory, 'frame_%04d.png')
ffmpeg_executable = 'ffmpeg.exe' if sys.platform == 'win32' else 'ffmpeg'
if ffmpeg_dir:
ffmpeg_executable = os.path.join(ffmpeg_dir, ffmpeg_executable)
command = [ffmpeg_executable, '-s', '%s' % size_string, '-i', '%s'
% yuv_file_name, '-f', 'image2', '-vcodec', 'png',
'%s' % output_files_pattern]
try:
print 'Converting YUV file to PNG images (may take a while)...'
print ' '.join(command)
helper_functions.run_shell_command(
command, fail_msg='Error during YUV to PNG conversion')
except helper_functions.HelperError, err:
print >> sys.stderr, 'Error executing command: %s. Error: %s' % (command,
err)
return False
return True
def decode_frames(input_directory, zxing_dir=None):
"""Decodes the barcodes overlaid in each frame.
The function uses the Zxing command-line tool from the Zxing C++ distribution
to decode the barcode in every PNG frame from the input directory. The frames
should be named frame_xxxx.png, where xxxx is the frame number. The frame
numbers should be consecutive and should start from 0001.
The decoding results in a frame_xxxx.txt file for every successfully decoded
barcode. This file contains the decoded barcode as 12-digit string (UPC-A
format: 11 digits content + one check digit).
Args:
input_directory(string): The input directory from where the PNG frames are
read.
zxing_dir(string): The directory containing the zxing executable. If
omitted, the PATH will be searched for it.
Return:
(bool): True if the decoding went without errors.
"""
zxing_executable = 'zxing.exe' if sys.platform == 'win32' else 'zxing'
if zxing_dir:
zxing_executable = os.path.join(zxing_dir, zxing_executable)
print 'Decoding barcodes from PNG files with %s...' % zxing_executable
return helper_functions.perform_action_on_all_files(
directory=input_directory, file_pattern='frame_',
file_extension='png', start_number=1, action=_decode_barcode_in_file,
command_line_decoder=zxing_executable)
def _decode_barcode_in_file(file_name, command_line_decoder):
"""Decodes the barcode in the upper left corner of a PNG file.
Args:
file_name(string): File name of the PNG file.
command_line_decoder(string): The ZXing command-line decoding tool.
Return:
(bool): True upon success, False otherwise.
"""
command = [command_line_decoder, '--try-harder', '--dump-raw', file_name]
try:
out = helper_functions.run_shell_command(
command, fail_msg='Error during decoding of %s' % file_name)
print 'Image %s : decoded barcode: %s' % (file_name, out)
text_file = open('%s.txt' % file_name[:-4], 'w')
text_file.write(out)
text_file.close()
except helper_functions.HelperError, err:
print >> sys.stderr, 'Barcode in %s cannot be decoded.' % file_name
print >> sys.stderr, err
return False
return True
def _generate_stats_file(stats_file_name, input_directory='.'):
"""Generate statistics file.
The function generates a statistics file. The contents of the file are in the
format <frame_name> <barcode>, where frame name is the name of every frame
(effectively the frame number) and barcode is the decoded barcode. The frames
and the helper .txt files are removed after they have been used.
"""
file_prefix = os.path.join(input_directory, 'frame_')
stats_file = open(stats_file_name, 'w')
print 'Generating stats file: %s' % stats_file_name
for i in range(1, _count_frames_in(input_directory=input_directory) + 1):
frame_number = helper_functions.zero_pad(i)
barcode_file_name = file_prefix + frame_number + '.txt'
png_frame = file_prefix + frame_number + '.png'
entry_frame_number = helper_functions.zero_pad(i-1)
entry = 'frame_' + entry_frame_number + ' '
if os.path.isfile(barcode_file_name):
barcode = _read_barcode_from_text_file(barcode_file_name)
os.remove(barcode_file_name)
if _check_barcode(barcode):
entry += (helper_functions.zero_pad(int(barcode[0:11])) + '\n')
else:
entry += 'Barcode error\n' # Barcode is wrongly detected.
else: # Barcode file doesn't exist.
entry += 'Barcode error\n'
stats_file.write(entry)
os.remove(png_frame)
stats_file.close()
def _read_barcode_from_text_file(barcode_file_name):
"""Reads the decoded barcode for a .txt file.
Args:
barcode_file_name(string): The name of the .txt file.
Return:
(string): The decoded barcode.
"""
barcode_file = open(barcode_file_name, 'r')
barcode = barcode_file.read()
barcode_file.close()
return barcode
def _check_barcode(barcode):
"""Check weather the UPC-A barcode was decoded correctly.
This function calculates the check digit of the provided barcode and compares
it to the check digit that was decoded.
Args:
barcode(string): The barcode (12-digit).
Return:
(bool): True if the barcode was decoded correctly.
"""
if len(barcode) != 12:
return False
r1 = range(0, 11, 2) # Odd digits
r2 = range(1, 10, 2) # Even digits except last
dsum = 0
# Sum all the odd-position digits
for i in r1:
dsum += int(barcode[i])
# Multiply the sum by 3
dsum *= 3
# Add all the even digits except the check digit (12th digit)
for i in r2:
dsum += int(barcode[i])
# Get the modulo 10
dsum = dsum % 10
# If not 0, subtract from 10
if dsum != 0:
dsum = 10 - dsum
# Compare result and check digit
return dsum == int(barcode[11])
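# Worked example (illustrative, not from the original source): for the UPC-A
# barcode "036000291452" the odd-position digits (indices 0, 2, ..., 10) sum to
# 0 + 6 + 0 + 2 + 1 + 5 = 14; 14 * 3 = 42; adding the even-position digits
# 3 + 0 + 0 + 9 + 4 = 16 gives 58; 58 % 10 = 8 and 10 - 8 = 2, which matches
# the final (check) digit, so _check_barcode("036000291452") returns True.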
def | (input_directory = '.'):
"""Calculates the number of frames in the input directory.
The function calculates the number of frames in the input directory. The
frames should be named frame_xxxx.png, where xxxx is the number of the frame.
The numbers should start from 1 and should be consecutive.
Args:
input_directory(string): The input directory.
Return:
(int): The number of frames.
"""
file_prefix = os.path.join(input_directory, 'frame_')
file_exists = True
num = 1
while file_exists:
file_name = (file_prefix + helper_functions.zero_pad(num) + '.png')
if os.path.isfile(file_name):
num += 1
else:
file_exists = False
return num - 1
def _parse_args():
"""Registers the command-line options."""
usage = "usage: %prog [options]"
parser = optparse.OptionParser(usage=usage)
parser.add_option('--zxing_dir', type='string',
help=('The path to the directory where the zxing executable'
'is located. If omitted, it will be assumed to be '
'present in the PATH.'))
parser.add_option('--ffmpeg_dir', type='string', default=None,
help=('The path to the directory where the ffmpeg '
'executable is located. If omitted, it will be '
'assumed to be present in the PATH.'))
parser.add_option('--yuv_frame_width', type='int', default=640,
help='Width of the YUV file\'s frames. Default: %default')
parser.add_option('--yuv_frame_height', type='int', default=480,
help='Height of the YUV file\'s frames. Default: %default')
parser.add_option('--yuv_file', type='string', default='output.yuv',
help='The YUV file to be decoded. Default: %default')
parser.add_option('--stats_file', type='string', default='stats.txt',
help='The output stats file. Default: %default')
parser.add_option('--png_working_dir', type='string', default='.',
help=('The directory for temporary PNG images to be stored '
'in when decoding from YUV before they\'re barcode '
'decoded. If using Windows and a Cygwin-compiled '
'zxing.exe, you should keep the default value to '
'avoid problems. Default: %default'))
options, _args = parser.parse_args()
return options
def _main():
"""The main function.
A simple invocation is:
./webrtc/tools/barcode_tools/barcode_decoder.py
--yuv_file=<path_and_name_of_overlaid_yuv_video>
--yuv_frame_width=640 --yuv_frame_height=480
--stats_file=<path_and_name_to_stats_file>
"""
options = _parse_args()
# Convert the overlaid YUV video into a set of PNG frames.
if not convert_yuv_to_png_files(options.yuv_file, options.yuv_frame_width,
options.yuv_frame_height,
output_directory=options.png_working_dir,
ffmpeg_dir=options.ffmpeg_dir):
print >> sys.stderr, 'An error occurred converting from YUV to PNG frames.'
return -1
# Decode the barcodes from the PNG frames.
if not decode_frames(input_directory=options.png_working_dir,
zxing_dir=options.zxing_dir):
print >> sys.stderr, ('An error occurred decoding barcodes from PNG frames.'
' Have you built the zxing C++ executable?')
return -2
# Generate statistics file.
_generate_stats_file(options.stats_file,
input_directory=options.png_working_dir)
print 'Completed barcode decoding.'
return 0
if __name__ == '__main__':
sys.exit(_main())
| _count_frames_in |
semantic-path.service.d.ts | import { RoutingConfigService } from '../routing-config.service';
import { UrlCommands } from './url-command';
import { UrlParsingService } from './url-parsing.service';
export declare class | {
protected routingConfigService: RoutingConfigService;
protected urlParser: UrlParsingService;
readonly ROOT_URL: string[];
constructor(routingConfigService: RoutingConfigService, urlParser: UrlParsingService);
/**
* Returns the first path alias configured for a given route name. It adds `/` at the beginning.
*/
get(routeName: string): string;
/**
* Transforms the array of url commands. Each command can be:
* a) string - will be left untouched
* b) object { cxRoute: <route name> } - will be replaced with semantic path
* c) object { cxRoute: <route name>, params: { ... } } - same as above, but with passed params
*
* If the first command is an object with the `cxRoute` property, returns an absolute url (with `'/'` as the first element of the array).
*/
transform(commands: UrlCommands): any[];
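// Rough illustration, assuming a hypothetical route config that maps 'product' to 'product/:productCode':
//   transform(['faq']) -> ['faq']
//   transform([{ cxRoute: 'product', params: { productCode: '1234' } }]) -> ['/', 'product', '1234']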
private isRouteCommand;
private shouldOutputAbsolute;
private generateUrlPart;
private standarizeRouteCommand;
private provideParamsValues;
private findPathWithFillableParams;
private getParams;
private getMappedParamName;
}
| SemanticPathService |
asap.py | import torch
import torch.nn.functional as F
from torch.nn import Linear
from torch_geometric.nn import (ASAPooling,
GraphConv, global_mean_pool,
JumpingKnowledge)
class ASAP(torch.nn.Module):
def __init__(self, num_vocab, max_seq_len, node_encoder, emb_dim, num_layers, hidden, ratio=0.8, dropout=0, num_class=0):
super(ASAP, self).__init__()
self.num_class = num_class
self.max_seq_len = max_seq_len
self.node_encoder = node_encoder
self.conv1 = GraphConv(emb_dim, hidden, aggr='mean')
self.convs = torch.nn.ModuleList()
self.pools = torch.nn.ModuleList()
self.convs.extend([
GraphConv(hidden, hidden, aggr='mean')
for i in range(num_layers - 1)
])
self.pools.extend([
ASAPooling(hidden, ratio, dropout=dropout)
for i in range((num_layers) // 2)
])
self.jump = JumpingKnowledge(mode='cat')
self.lin1 = Linear(num_layers * hidden, hidden)
# self.lin2 = Linear(hidden, dataset.num_classes)
if self.num_class > 0: # classification
self.graph_pred_linear = torch.nn.Linear(hidden, self.num_class)
else:
self.graph_pred_linear_list = torch.nn.ModuleList()
for i in range(max_seq_len):
self.graph_pred_linear_list.append(torch.nn.Linear(hidden, num_vocab))
def reset_parameters(self):
self.conv1.reset_parameters()
for conv in self.convs:
conv.reset_parameters()
for pool in self.pools:
pool.reset_parameters()
self.lin1.reset_parameters()
# self.lin2.reset_parameters()  # lin2 is not created in __init__ (it is commented out above)
def forward(self, data):
x, edge_index, node_depth, batch = data.x, data.edge_index, data.node_depth, data.batch
x = self.node_encoder(x, node_depth.view(-1, )) | for i, conv in enumerate(self.convs):
x = conv(x=x, edge_index=edge_index, edge_weight=edge_weight)
x = F.relu(x)
xs += [global_mean_pool(x, batch)]
if i % 2 == 0 and i < len(self.convs) - 1:
pool = self.pools[i // 2]
x, edge_index, edge_weight, batch, _ = pool(
x=x, edge_index=edge_index, edge_weight=edge_weight,
batch=batch)
x = self.jump(xs)
x = F.relu(self.lin1(x))
x = F.dropout(x, p=0.5, training=self.training)
# x = self.lin2(x)
# return F.log_softmax(x, dim=-1)
if self.num_class > 0:
return self.graph_pred_linear(x)
pred_list = []
for i in range(self.max_seq_len):
pred_list.append(self.graph_pred_linear_list[i](x))
return pred_list
def __repr__(self):
return self.__class__.__name__ |
edge_weight = None
x = F.relu(self.conv1(x, edge_index))
xs = [global_mean_pool(x, batch)] |
596.996787f430662a972536.js | (self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[596,824],{26744:function(e){e.exports={100:"Continue",101:"Switching Protocols",102:"Processing",200:"OK",201:"Created",202:"Accepted",203:"Non-Authoritative Information",204:"No Content",205:"Reset Content",206:"Partial Content",207:"Multi-Status",208:"Already Reported",226:"IM Used",300:"Multiple Choices",301:"Moved Permanently",302:"Found",303:"See Other",304:"Not Modified",305:"Use Proxy",307:"Temporary Redirect",308:"Permanent Redirect",400:"Bad Request",401:"Unauthorized",402:"Payment Required",403:"Forbidden",404:"Not Found",405:"Method Not Allowed",406:"Not Acceptable",407:"Proxy Authentication Required",408:"Request Timeout",409:"Conflict",410:"Gone",411:"Length Required",412:"Precondition Failed",413:"Payload Too Large",414:"URI Too Long",415:"Unsupported Media Type",416:"Range Not Satisfiable",417:"Expectation Failed",418:"I'm a teapot",421:"Misdirected Request",422:"Unprocessable Entity",423:"Locked",424:"Failed Dependency",425:"Unordered Collection",426:"Upgrade Required",428:"Precondition Required",429:"Too Many Requests",431:"Request Header Fields Too Large",451:"Unavailable For Legal Reasons",500:"Internal Server Error",501:"Not Implemented",502:"Bad Gateway",503:"Service Unavailable",504:"Gateway Timeout",505:"HTTP Version Not Supported",506:"Variant Also Negotiates",507:"Insufficient Storage",508:"Loop Detected",509:"Bandwidth Limit Exceeded",510:"Not Extended",511:"Network Authentication Required"}},62621:function(e,t,r){var n=r(23439),o=r(43824),i=e.exports;for(var a in n)n.hasOwnProperty(a)&&(i[a]=n[a]);function s(e){if("string"===typeof e&&(e=o.parse(e)),e.protocol||(e.protocol="https:"),"https:"!==e.protocol)throw new Error('Protocol "'+e.protocol+'" not supported. 
Expected "https:"');return e}i.request=function(e,t){return e=s(e),n.request.call(this,e,t)},i.get=function(e,t){return e=s(e),n.get.call(this,e,t)}},43824:function(e,t,r){var n,o=(n=r(70574))&&"object"==typeof n&&"default"in n?n.default:n,i=/https?|ftp|gopher|file/;function a(e){"string"==typeof e&&(e=_(e));var t=function(e,t,r){var n=e.auth,o=e.hostname,i=e.protocol||"",a=e.pathname||"",s=e.hash||"",u=e.query||"",l=!1;n=n?encodeURIComponent(n).replace(/%3A/i,":")+"@":"",e.host?l=n+e.host:o&&(l=n+(~o.indexOf(":")?"["+o+"]":o),e.port&&(l+=":"+e.port)),u&&"object"==typeof u&&(u=t.encode(u));var c=e.search||u&&"?"+u||"";return i&&":"!==i.substr(-1)&&(i+=":"),e.slashes||(!i||r.test(i))&&!1!==l?(l="//"+(l||""),a&&"/"!==a[0]&&(a="/"+a)):l||(l=""),s&&"#"!==s[0]&&(s="#"+s),c&&"?"!==c[0]&&(c="?"+c),{protocol:i,host:l,pathname:a=a.replace(/[?#]/g,encodeURIComponent),search:c=c.replace("#","%23"),hash:s}}(e,o,i);return""+t.protocol+t.host+t.pathname+t.search+t.hash}var s="http://",u="w.w",l=s+u,c=/^([a-z0-9.+-]*:\/\/\/)([a-z0-9.+-]:\/*)?/i,d=/https?|ftp|gopher|file/;function f(e,t){var r="string"==typeof e?_(e):e;e="object"==typeof e?a(e):e;var n=_(t),o="";r.protocol&&!r.slashes&&(o=r.protocol,e=e.replace(r.protocol,""),o+="/"===t[0]||"/"===e[0]?"/":""),o&&n.protocol&&(o="",n.slashes||(o=n.protocol,t=t.replace(n.protocol,"")));var i=e.match(c);i&&!n.protocol&&(e=e.substr((o=i[1]+(i[2]||"")).length),/^\/\/[^/]/.test(t)&&(o=o.slice(0,-1)));var u=new URL(e,l+"/"),f=new URL(t,u).toString().replace(l,""),h=n.protocol||r.protocol;return h+=r.slashes||n.slashes?"//":"",!o&&h?f=f.replace(s,h):o&&(f=f.replace(s,"")),d.test(f)||~t.indexOf(".")||"/"===e.slice(-1)||"/"===t.slice(-1)||"/"!==f.slice(-1)||(f=f.slice(0,-1)),o&&(f=o+("/"===f[0]?f.substr(1):f)),f}function h(){}h.prototype.parse=_,h.prototype.format=a,h.prototype.resolve=f,h.prototype.resolveObject=f;var p=/^https?|ftp|gopher|file/,b=/^(.*?)([#?].*)/,y=/^([a-z0-9.+-]*:)(\/{0,3})(.*)/i,g=/^([a-z0-9.+-]*:)?\/\/\/*/i,m=/^([a-z0-9.+-]*:)(\/{0,2})\[(.*)\]$/i;function _(e,t,r){if(void 0===t&&(t=!1),void 0===r&&(r=!1),e&&"object"==typeof e&&e instanceof h)return e;var n=(e=e.trim()).match(b);e=n?n[1].replace(/\\/g,"/")+n[2]:e.replace(/\\/g,"/"),m.test(e)&&"/"!==e.slice(-1)&&(e+="/");var i=!/(^javascript)/.test(e)&&e.match(y),s=g.test(e),c="";i&&(p.test(i[1])||(c=i[1].toLowerCase(),e=""+i[2]+i[3]),i[2]||(s=!1,p.test(i[1])?(c=i[1],e=""+i[3]):e="//"+i[3]),3!==i[2].length&&1!==i[2].length||(c=i[1],e="/"+i[3]));var d,f=(n?n[1]:e).match(/^https?:\/\/[^/]+(:[0-9]+)(?=\/|$)/),_=f&&f[1],v=new h,w="",R="";try{d=new URL(e)}catch(o){w=o,c||r||!/^\/\//.test(e)||/^\/\/.+[@.]/.test(e)||(R="/",e=e.substr(1));try{d=new URL(e,l)}catch(e){return v.protocol=c,v.href=c,v}}v.slashes=s&&!R,v.host=d.host===u?"":d.host,v.hostname=d.hostname===u?"":d.hostname.replace(/(\[|\])/g,""),v.protocol=w?c||null:d.protocol,v.search=d.search.replace(/\\/g,"%5C"),v.hash=d.hash.replace(/\\/g,"%5C");var S=e.split("#");!v.search&&~S[0].indexOf("?")&&(v.search="?"),v.hash||""!==S[1]||(v.hash="#"),v.query=t?o.decode(d.search.substr(1)):v.search.substr(1),v.pathname=R+(i?function(e){return e.replace(/['^|`]/g,(function(e){return"%"+e.charCodeAt().toString(16).toUpperCase()})).replace(/((?:%[0-9A-F]{2})+)/g,(function(e,t){try{return decodeURIComponent(t).split("").map((function(e){var t=e.charCodeAt();return t>256||/^[a-z0-9]$/i.test(e)?e:"%"+t.toString(16).toUpperCase()})).join("")}catch(e){return 
t}}))}(d.pathname):d.pathname),"about:"===v.protocol&&"blank"===v.pathname&&(v.protocol="",v.pathname=""),w&&"/"!==e[0]&&(v.pathname=v.pathname.substr(1)),c&&!p.test(c)&&"/"!==e.slice(-1)&&"/"===v.pathname&&(v.pathname=""),v.path=v.pathname+v.search,v.auth=[d.username,d.password].map(decodeURIComponent).filter(Boolean).join(":"),v.port=d.port,_&&!v.host.endsWith(_)&&(v.host+=_,v.port=_.slice(1)),v.href=R?""+v.pathname+v.search+v.hash:a(v);var E=/^(file)/.test(v.href)?["host","hostname"]:[];return Object.keys(v).forEach((function(e){~E.indexOf(e)||(v[e]=v[e]||null)})),v}t.parse=_,t.format=a,t.resolve=f,t.resolveObject=function(e,t){return _(f(e,t))},t.Url=h},17523:function(e){"use strict";function t(e,t){return Object.prototype.hasOwnProperty.call(e,t)}e.exports=function(e,r,n,o){r=r||"&",n=n||"=";var i={};if("string"!==typeof e||0===e.length)return i;var a=/\+/g;e=e.split(r);var s=1e3;o&&"number"===typeof o.maxKeys&&(s=o.maxKeys);var u=e.length;s>0&&u>s&&(u=s);for(var l=0;l<u;++l){var c,d,f,h,p=e[l].replace(a,"%20"),b=p.indexOf(n);b>=0?(c=p.substr(0,b),d=p.substr(b+1)):(c=p,d=""),f=decodeURIComponent(c),h=decodeURIComponent(d),t(i,f)?Array.isArray(i[f])?i[f].push(h):i[f]=[i[f],h]:i[f]=h}return i}},73552:function(e){"use strict";var t=function(e){switch(typeof e){case"string":return e;case"boolean":return e?"true":"false";case"number":return isFinite(e)?e:"";default:return""}};e.exports=function(e,r,n,o){return r=r||"&",n=n||"=",null===e&&(e=void 0),"object"===typeof e?Object.keys(e).map((function(o){var i=encodeURIComponent(t(o))+n;return Array.isArray(e[o])?e[o].map((function(e){return i+encodeURIComponent(t(e))})).join(r):i+encodeURIComponent(t(e[o]))})).filter(Boolean).join(r):o?encodeURIComponent(t(o))+n+encodeURIComponent(t(e)):""}},70574:function(e,t,r){"use strict";t.decode=t.parse=r(17523),t.encode=t.stringify=r(73552)},65938:function(e,t){t.endianness=function(){return"LE"},t.hostname=function(){return"undefined"!==typeof location?location.hostname:""},t.loadavg=function(){return[]},t.uptime=function(){return 0},t.freemem=function(){return Number.MAX_VALUE},t.totalmem=function(){return Number.MAX_VALUE},t.cpus=function(){return[]},t.type=function(){return"Browser"},t.release=function(){return"undefined"!==typeof navigator?navigator.appVersion:""},t.networkInterfaces=t.getNetworkInterfaces=function(){return{}},t.arch=function(){return"javascript"},t.platform=function(){return"browser"},t.tmpdir=t.tmpDir=function(){return"/tmp"},t.EOL="\n",t.homedir=function(){return"/"}},23439:function(e,t,r){var n=r(43224),o=r(19291),i=r(51960),a=r(26744),s=r(43824),u=t;u.request=function(e,t){e="string"===typeof e?s.parse(e):i(e);var o=-1===r.g.location.protocol.search(/^https?:$/)?"http:":"",a=e.protocol||o,u=e.hostname||e.host,l=e.port,c=e.path||"/";u&&-1!==u.indexOf(":")&&(u="["+u+"]"),e.url=(u?a+"//"+u:"")+(l?":"+l:"")+c,e.method=(e.method||"GET").toUpperCase(),e.headers=e.headers||{};var d=new n(e);return t&&d.on("response",t),d},u.get=function(e,t){var r=u.request(e,t);return r.end(),r},u.ClientRequest=n,u.IncomingMessage=o.IncomingMessage,u.Agent=function(){},u.Agent.defaultMaxSockets=4,u.globalAgent=new u.Agent,u.STATUS_CODES=a,u.METHODS=["CHECKOUT","CONNECT","COPY","DELETE","GET","HEAD","LOCK","M-SEARCH","MERGE","MKACTIVITY","MKCOL","MOVE","NOTIFY","OPTIONS","PATCH","POST","PROPFIND","PROPPATCH","PURGE","PUT","REPORT","SEARCH","SUBSCRIBE","TRACE","UNLOCK","UNSUBSCRIBE"]},19633:function(e,t,r){var n;function o(){if(void 0!==n)return n;if(r.g.XMLHttpRequest){n=new 
r.g.XMLHttpRequest;try{n.open("GET",r.g.XDomainRequest?"/":"https://example.com")}catch(e){n=null}}else n=null;return n}function i(e){var t=o();if(!t)return!1;try{return t.responseType=e,t.responseType===e}catch(r){}return!1}function a(e){return"function"===typeof e}t.fetch=a(r.g.fetch)&&a(r.g.ReadableStream),t.writableStream=a(r.g.WritableStream),t.abortController=a(r.g.AbortController),t.arraybuffer=t.fetch||i("arraybuffer"),t.msstream=!t.fetch&&i("ms-stream"),t.mozchunkedarraybuffer=!t.fetch&&i("moz-chunked-arraybuffer"),t.overrideMimeType=t.fetch||!!o()&&a(o().overrideMimeType),n=null},43224:function(e,t,r){var n=r(48834).Buffer,o=r(34406),i=r(19633),a=r(91285),s=r(19291),u=r(27669),l=s.IncomingMessage,c=s.readyStates;var d=e.exports=function(e){var t,r=this;u.Writable.call(r),r._opts=e,r._body=[],r._headers={},e.auth&&r.setHeader("Authorization","Basic "+n.from(e.auth).toString("base64")),Object.keys(e.headers).forEach((function(t){r.setHeader(t,e.headers[t])}));var o=!0;if("disable-fetch"===e.mode||"requestTimeout"in e&&!i.abortController)o=!1,t=!0;else if("prefer-streaming"===e.mode)t=!1;else if("allow-wrong-content-type"===e.mode)t=!i.overrideMimeType;else{if(e.mode&&"default"!==e.mode&&"prefer-fast"!==e.mode)throw new Error("Invalid value for opts.mode");t=!0}r._mode=function(e,t){return i.fetch&&t?"fetch":i.mozchunkedarraybuffer?"moz-chunked-arraybuffer":i.msstream?"ms-stream":i.arraybuffer&&e?"arraybuffer":"text"}(t,o),r._fetchTimer=null,r.on("finish",(function(){r._onFinish()}))};a(d,u.Writable),d.prototype.setHeader=function(e,t){var r=e.toLowerCase();-1===f.indexOf(r)&&(this._headers[r]={name:e,value:t})},d.prototype.getHeader=function(e){var t=this._headers[e.toLowerCase()];return t?t.value:null},d.prototype.removeHeader=function(e){delete this._headers[e.toLowerCase()]},d.prototype._onFinish=function(){var e=this;if(!e._destroyed){var t=e._opts,n=e._headers,a=null;"GET"!==t.method&&"HEAD"!==t.method&&(a=new Blob(e._body,{type:(n["content-type"]||{}).value||""}));var s=[];if(Object.keys(n).forEach((function(e){var t=n[e].name,r=n[e].value;Array.isArray(r)?r.forEach((function(e){s.push([t,e])})):s.push([t,r])})),"fetch"===e._mode){var u=null;if(i.abortController){var l=new AbortController;u=l.signal,e._fetchAbortController=l,"requestTimeout"in t&&0!==t.requestTimeout&&(e._fetchTimer=r.g.setTimeout((function(){e.emit("requestTimeout"),e._fetchAbortController&&e._fetchAbortController.abort()}),t.requestTimeout))}r.g.fetch(e._opts.url,{method:e._opts.method,headers:s,body:a||void 0,mode:"cors",credentials:t.withCredentials?"include":"same-origin",signal:u}).then((function(t){e._fetchResponse=t,e._connect()}),(function(t){r.g.clearTimeout(e._fetchTimer),e._destroyed||e.emit("error",t)}))}else{var d=e._xhr=new r.g.XMLHttpRequest;try{d.open(e._opts.method,e._opts.url,!0)}catch(f){return void o.nextTick((function(){e.emit("error",f)}))}"responseType"in d&&(d.responseType=e._mode),"withCredentials"in d&&(d.withCredentials=!!t.withCredentials),"text"===e._mode&&"overrideMimeType"in d&&d.overrideMimeType("text/plain; charset=x-user-defined"),"requestTimeout"in t&&(d.timeout=t.requestTimeout,d.ontimeout=function(){e.emit("requestTimeout")}),s.forEach((function(e){d.setRequestHeader(e[0],e[1])})),e._response=null,d.onreadystatechange=function(){switch(d.readyState){case c.LOADING:case c.DONE:e._onXHRProgress()}},"moz-chunked-arraybuffer"===e._mode&&(d.onprogress=function(){e._onXHRProgress()}),d.onerror=function(){e._destroyed||e.emit("error",new Error("XHR 
error"))};try{d.send(a)}catch(f){return void o.nextTick((function(){e.emit("error",f)}))}}}},d.prototype._onXHRProgress=function(){var e=this;(function(e){try{var t=e.status;return null!==t&&0!==t}catch(r){return!1}})(e._xhr)&&!e._destroyed&&(e._response||e._connect(),e._response._onXHRProgress())},d.prototype._connect=function(){var e=this;e._destroyed||(e._response=new l(e._xhr,e._fetchResponse,e._mode,e._fetchTimer),e._response.on("error",(function(t){e.emit("error",t)})),e.emit("response",e._response))},d.prototype._write=function(e,t,r){this._body.push(e),r()},d.prototype.abort=d.prototype.destroy=function(){var e=this;e._destroyed=!0,r.g.clearTimeout(e._fetchTimer),e._response&&(e._response._destroyed=!0),e._xhr?e._xhr.abort():e._fetchAbortController&&e._fetchAbortController.abort()},d.prototype.end=function(e,t,r){"function"===typeof e&&(r=e,e=void 0),u.Writable.prototype.end.call(this,e,t,r)},d.prototype.flushHeaders=function(){},d.prototype.setTimeout=function(){},d.prototype.setNoDelay=function(){},d.prototype.setSocketKeepAlive=function(){};var f=["accept-charset","accept-encoding","access-control-request-headers","access-control-request-method","connection","content-length","cookie","cookie2","date","dnt","expect","host","keep-alive","origin","referer","te","trailer","transfer-encoding","upgrade","via"]},19291:function(e,t,r){var n=r(34406),o=r(48834).Buffer,i=r(19633),a=r(91285),s=r(27669),u=t.readyStates={UNSENT:0,OPENED:1,HEADERS_RECEIVED:2,LOADING:3,DONE:4},l=t.IncomingMessage=function(e,t,a,u){var l=this;if(s.Readable.call(l),l._mode=a,l.headers={},l.rawHeaders=[],l.trailers={},l.rawTrailers=[],l.on("end",(function(){n.nextTick((function(){l.emit("close")}))})),"fetch"===a){if(l._fetchResponse=t,l.url=t.url,l.statusCode=t.status,l.statusMessage=t.statusText,t.headers.forEach((function(e,t){l.headers[t.toLowerCase()]=e,l.rawHeaders.push(t,e)})),i.writableStream){var c=new WritableStream({write:function(e){return new Promise((function(t,r){l._destroyed?r():l.push(o.from(e))?t():l._resumeFetch=t}))},close:function(){r.g.clearTimeout(u),l._destroyed||l.push(null)},abort:function(e){l._destroyed||l.emit("error",e)}});try{return void t.body.pipeTo(c).catch((function(e){r.g.clearTimeout(u),l._destroyed||l.emit("error",e)}))}catch(p){}}var d=t.body.getReader();!function e(){d.read().then((function(t){if(!l._destroyed){if(t.done)return r.g.clearTimeout(u),void l.push(null);l.push(o.from(t.value)),e()}})).catch((function(e){r.g.clearTimeout(u),l._destroyed||l.emit("error",e)}))}()}else{if(l._xhr=e,l._pos=0,l.url=e.responseURL,l.statusCode=e.status,l.statusMessage=e.statusText,e.getAllResponseHeaders().split(/\r?\n/).forEach((function(e){var t=e.match(/^([^:]+):\s*(.*)/);if(t){var r=t[1].toLowerCase();"set-cookie"===r?(void 0===l.headers[r]&&(l.headers[r]=[]),l.headers[r].push(t[2])):void 0!==l.headers[r]?l.headers[r]+=", "+t[2]:l.headers[r]=t[2],l.rawHeaders.push(t[1],t[2])}})),l._charset="x-user-defined",!i.overrideMimeType){var f=l.rawHeaders["mime-type"];if(f){var h=f.match(/;\s*charset=([^;])(;|$)/);h&&(l._charset=h[1].toLowerCase())}l._charset||(l._charset="utf-8")}}};a(l,s.Readable),l.prototype._read=function(){var e=this._resumeFetch;e&&(this._resumeFetch=null,e())},l.prototype._onXHRProgress=function(){var e=this,t=e._xhr,n=null;switch(e._mode){case"text":if((n=t.responseText).length>e._pos){var i=n.substr(e._pos);if("x-user-defined"===e._charset){for(var a=o.alloc(i.length),s=0;s<i.length;s++)a[s]=255&i.charCodeAt(s);e.push(a)}else 
e.push(i,e._charset);e._pos=n.length}break;case"arraybuffer":if(t.readyState!==u.DONE||!t.response)break;n=t.response,e.push(o.from(new Uint8Array(n)));break;case"moz-chunked-arraybuffer":if(n=t.response,t.readyState!==u.LOADING||!n)break;e.push(o.from(new Uint8Array(n)));break;case"ms-stream":if(n=t.response,t.readyState!==u.LOADING)break;var l=new r.g.MSStreamReader;l.onprogress=function(){l.result.byteLength>e._pos&&(e.push(o.from(new Uint8Array(l.result.slice(e._pos)))),e._pos=l.result.byteLength)},l.onload=function(){e.push(null)},l.readAsArrayBuffer(n)}e._xhr.readyState===u.DONE&&"ms-stream"!==e._mode&&e.push(null)}},96308:function(e){"use strict";var t={};function r(e,r,n){n||(n=Error);var o=function(e){var t,n;function o(t,n,o){return e.call(this,function(e,t,n){return"string"===typeof r?r:r(e,t,n)}(t,n,o))||this}return n=e,(t=o).prototype=Object.create(n.prototype),t.prototype.constructor=t,t.__proto__=n,o}(n);o.prototype.name=n.name,o.prototype.code=e,t[e]=o}function n(e,t){if(Array.isArray(e)){var r=e.length;return e=e.map((function(e){return String(e)})),r>2?"one of ".concat(t," ").concat(e.slice(0,r-1).join(", "),", or ")+e[r-1]:2===r?"one of ".concat(t," ").concat(e[0]," or ").concat(e[1]):"of ".concat(t," ").concat(e[0])}return"of ".concat(t," ").concat(String(e))}r("ERR_INVALID_OPT_VALUE",(function(e,t){return'The value "'+t+'" is invalid for option "'+e+'"'}),TypeError),r("ERR_INVALID_ARG_TYPE",(function(e,t,r){var o,i,a,s;if("string"===typeof t&&(i="not ",t.substr(!a||a<0?0:+a,i.length)===i)?(o="must not be",t=t.replace(/^not /,"")):o="must be",function(e,t,r){return(void 0===r||r>e.length)&&(r=e.length),e.substring(r-t.length,r)===t}(e," argument"))s="The ".concat(e," ").concat(o," ").concat(n(t,"type"));else{var u=function(e,t,r){return"number"!==typeof r&&(r=0),!(r+t.length>e.length)&&-1!==e.indexOf(t,r)}(e,".")?"property":"argument";s='The "'.concat(e,'" ').concat(u," ").concat(o," ").concat(n(t,"type"))}return s+=". 
Received type ".concat(typeof r)}),TypeError),r("ERR_STREAM_PUSH_AFTER_EOF","stream.push() after EOF"),r("ERR_METHOD_NOT_IMPLEMENTED",(function(e){return"The "+e+" method is not implemented"})),r("ERR_STREAM_PREMATURE_CLOSE","Premature close"),r("ERR_STREAM_DESTROYED",(function(e){return"Cannot call "+e+" after a stream was destroyed"})),r("ERR_MULTIPLE_CALLBACK","Callback called multiple times"),r("ERR_STREAM_CANNOT_PIPE","Cannot pipe, not readable"),r("ERR_STREAM_WRITE_AFTER_END","write after end"),r("ERR_STREAM_NULL_VALUES","May not write null values to stream",TypeError),r("ERR_UNKNOWN_ENCODING",(function(e){return"Unknown encoding: "+e}),TypeError),r("ERR_STREAM_UNSHIFT_AFTER_END_EVENT","stream.unshift() after end event"),e.exports.q=t},13168:function(e,t,r){"use strict";var n=r(34406),o=Object.keys||function(e){var t=[];for(var r in e)t.push(r);return t};e.exports=c;var i=r(41975),a=r(70437);r(91285)(c,i);for(var s=o(a.prototype),u=0;u<s.length;u++){var l=s[u];c.prototype[l]||(c.prototype[l]=a.prototype[l])}function c(e){if(!(this instanceof c))return new c(e);i.call(this,e),a.call(this,e),this.allowHalfOpen=!0,e&&(!1===e.readable&&(this.readable=!1),!1===e.writable&&(this.writable=!1),!1===e.allowHalfOpen&&(this.allowHalfOpen=!1,this.once("end",d)))}function d(){this._writableState.ended||n.nextTick(f,this)}function f(e){e.end()}Object.defineProperty(c.prototype,"writableHighWaterMark",{enumerable:!1,get:function(){return this._writableState.highWaterMark}}),Object.defineProperty(c.prototype,"writableBuffer",{enumerable:!1,get:function(){return this._writableState&&this._writableState.getBuffer()}}),Object.defineProperty(c.prototype,"writableLength",{enumerable:!1,get:function(){return this._writableState.length}}),Object.defineProperty(c.prototype,"destroyed",{enumerable:!1,get:function(){return void 0!==this._readableState&&void 0!==this._writableState&&(this._readableState.destroyed&&this._writableState.destroyed)},set:function(e){void 0!==this._readableState&&void 0!==this._writableState&&(this._readableState.destroyed=e,this._writableState.destroyed=e)}})},584:function(e,t,r){"use strict";e.exports=o;var n=r(77863);function o(e){if(!(this instanceof o))return new o(e);n.call(this,e)}r(91285)(o,n),o.prototype._transform=function(e,t,r){r(null,e)}},41975:function(e,t,r){"use strict";var n,o=r(34406);e.exports=T,T.ReadableState=E;r(22699).EventEmitter;var i=function(e,t){return e.listeners(t).length},a=r(62233),s=r(48834).Buffer,u=r.g.Uint8Array||function(){};var l,c=r(57017);l=c&&c.debuglog?c.debuglog("stream"):function(){};var d,f,h,p=r(30711),b=r(37612),y=r(57241).getHighWaterMark,g=r(96308).q,m=g.ERR_INVALID_ARG_TYPE,_=g.ERR_STREAM_PUSH_AFTER_EOF,v=g.ERR_METHOD_NOT_IMPLEMENTED,w=g.ERR_STREAM_UNSHIFT_AFTER_END_EVENT;r(91285)(T,a);var R=b.errorOrDestroy,S=["error","close","destroy","pause","resume"];function E(e,t,o){n=n||r(13168),e=e||{},"boolean"!==typeof o&&(o=t instanceof n),this.objectMode=!!e.objectMode,o&&(this.objectMode=this.objectMode||!!e.readableObjectMode),this.highWaterMark=y(this,e,"readableHighWaterMark",o),this.buffer=new 
p,this.length=0,this.pipes=null,this.pipesCount=0,this.flowing=null,this.ended=!1,this.endEmitted=!1,this.reading=!1,this.sync=!0,this.needReadable=!1,this.emittedReadable=!1,this.readableListening=!1,this.resumeScheduled=!1,this.paused=!0,this.emitClose=!1!==e.emitClose,this.autoDestroy=!!e.autoDestroy,this.destroyed=!1,this.defaultEncoding=e.defaultEncoding||"utf8",this.awaitDrain=0,this.readingMore=!1,this.decoder=null,this.encoding=null,e.encoding&&(d||(d=r(30214).s),this.decoder=new d(e.encoding),this.encoding=e.encoding)}function T(e){if(n=n||r(13168),!(this instanceof T))return new T(e);var t=this instanceof n;this._readableState=new E(e,this,t),this.readable=!0,e&&("function"===typeof e.read&&(this._read=e.read),"function"===typeof e.destroy&&(this._destroy=e.destroy)),a.call(this)}function k(e,t,r,n,o){l("readableAddChunk",t);var i,a=e._readableState;if(null===t)a.reading=!1,function(e,t){if(l("onEofChunk"),t.ended)return;if(t.decoder){var r=t.decoder.end();r&&r.length&&(t.buffer.push(r),t.length+=t.objectMode?1:r.length)}t.ended=!0,t.sync?A(e):(t.needReadable=!1,t.emittedReadable||(t.emittedReadable=!0,x(e)))}(e,a);else if(o||(i=function(e,t){var r;n=t,s.isBuffer(n)||n instanceof u||"string"===typeof t||void 0===t||e.objectMode||(r=new m("chunk",["string","Buffer","Uint8Array"],t));var n;return r}(a,t)),i)R(e,i);else if(a.objectMode||t&&t.length>0)if("string"===typeof t||a.objectMode||Object.getPrototypeOf(t)===s.prototype||(t=function(e){return s.from(e)}(t)),n)a.endEmitted?R(e,new w):M(e,a,t,!0);else if(a.ended)R(e,new _);else{if(a.destroyed)return!1;a.reading=!1,a.decoder&&!r?(t=a.decoder.write(t),a.objectMode||0!==t.length?M(e,a,t,!1):L(e,a)):M(e,a,t,!1)}else n||(a.reading=!1,L(e,a));return!a.ended&&(a.length<a.highWaterMark||0===a.length)}function M(e,t,r,n){t.flowing&&0===t.length&&!t.sync?(t.awaitDrain=0,e.emit("data",r)):(t.length+=t.objectMode?1:r.length,n?t.buffer.unshift(r):t.buffer.push(r),t.needReadable&&A(e)),L(e,t)}Object.defineProperty(T.prototype,"destroyed",{enumerable:!1,get:function(){return void 0!==this._readableState&&this._readableState.destroyed},set:function(e){this._readableState&&(this._readableState.destroyed=e)}}),T.prototype.destroy=b.destroy,T.prototype._undestroy=b.undestroy,T.prototype._destroy=function(e,t){t(e)},T.prototype.push=function(e,t){var r,n=this._readableState;return n.objectMode?r=!0:"string"===typeof e&&((t=t||n.defaultEncoding)!==n.encoding&&(e=s.from(e,t),t=""),r=!0),k(this,e,t,!1,r)},T.prototype.unshift=function(e){return k(this,e,null,!0,!1)},T.prototype.isPaused=function(){return!1===this._readableState.flowing},T.prototype.setEncoding=function(e){d||(d=r(30214).s);var t=new d(e);this._readableState.decoder=t,this._readableState.encoding=this._readableState.decoder.encoding;for(var n=this._readableState.buffer.head,o="";null!==n;)o+=t.write(n.data),n=n.next;return this._readableState.buffer.clear(),""!==o&&this._readableState.buffer.push(o),this._readableState.length=o.length,this};var O=1073741824;function C(e,t){return e<=0||0===t.length&&t.ended?0:t.objectMode?1:e!==e?t.flowing&&t.length?t.buffer.head.data.length:t.length:(e>t.highWaterMark&&(t.highWaterMark=function(e){return e>=O?e=O:(e--,e|=e>>>1,e|=e>>>2,e|=e>>>4,e|=e>>>8,e|=e>>>16,e++),e}(e)),e<=t.length?e:t.ended?t.length:(t.needReadable=!0,0))}function A(e){var t=e._readableState;l("emitReadable",t.needReadable,t.emittedReadable),t.needReadable=!1,t.emittedReadable||(l("emitReadable",t.flowing),t.emittedReadable=!0,o.nextTick(x,e))}function x(e){var 
t=e._readableState;l("emitReadable_",t.destroyed,t.length,t.ended),t.destroyed||!t.length&&!t.ended||(e.emit("readable"),t.emittedReadable=!1),t.needReadable=!t.flowing&&!t.ended&&t.length<=t.highWaterMark,I(e)}function L(e,t){t.readingMore||(t.readingMore=!0,o.nextTick(P,e,t))}function P(e,t){for(;!t.reading&&!t.ended&&(t.length<t.highWaterMark||t.flowing&&0===t.length);){var r=t.length;if(l("maybeReadMore read 0"),e.read(0),r===t.length)break}t.readingMore=!1}function j(e){var t=e._readableState;t.readableListening=e.listenerCount("readable")>0,t.resumeScheduled&&!t.paused?t.flowing=!0:e.listenerCount("data")>0&&e.resume()}function | (e){l("readable nexttick read 0"),e.read(0)}function N(e,t){l("resume",t.reading),t.reading||e.read(0),t.resumeScheduled=!1,e.emit("resume"),I(e),t.flowing&&!t.reading&&e.read(0)}function I(e){var t=e._readableState;for(l("flow",t.flowing);t.flowing&&null!==e.read(););}function U(e,t){return 0===t.length?null:(t.objectMode?r=t.buffer.shift():!e||e>=t.length?(r=t.decoder?t.buffer.join(""):1===t.buffer.length?t.buffer.first():t.buffer.concat(t.length),t.buffer.clear()):r=t.buffer.consume(e,t.decoder),r);var r}function q(e){var t=e._readableState;l("endReadable",t.endEmitted),t.endEmitted||(t.ended=!0,o.nextTick(H,t,e))}function H(e,t){if(l("endReadableNT",e.endEmitted,e.length),!e.endEmitted&&0===e.length&&(e.endEmitted=!0,t.readable=!1,t.emit("end"),e.autoDestroy)){var r=t._writableState;(!r||r.autoDestroy&&r.finished)&&t.destroy()}}function B(e,t){for(var r=0,n=e.length;r<n;r++)if(e[r]===t)return r;return-1}T.prototype.read=function(e){l("read",e),e=parseInt(e,10);var t=this._readableState,r=e;if(0!==e&&(t.emittedReadable=!1),0===e&&t.needReadable&&((0!==t.highWaterMark?t.length>=t.highWaterMark:t.length>0)||t.ended))return l("read: emitReadable",t.length,t.ended),0===t.length&&t.ended?q(this):A(this),null;if(0===(e=C(e,t))&&t.ended)return 0===t.length&&q(this),null;var n,o=t.needReadable;return l("need readable",o),(0===t.length||t.length-e<t.highWaterMark)&&l("length less than watermark",o=!0),t.ended||t.reading?l("reading or ended",o=!1):o&&(l("do read"),t.reading=!0,t.sync=!0,0===t.length&&(t.needReadable=!0),this._read(t.highWaterMark),t.sync=!1,t.reading||(e=C(r,t))),null===(n=e>0?U(e,t):null)?(t.needReadable=t.length<=t.highWaterMark,e=0):(t.length-=e,t.awaitDrain=0),0===t.length&&(t.ended||(t.needReadable=!0),r!==e&&t.ended&&q(this)),null!==n&&this.emit("data",n),n},T.prototype._read=function(e){R(this,new v("_read()"))},T.prototype.pipe=function(e,t){var r=this,n=this._readableState;switch(n.pipesCount){case 0:n.pipes=e;break;case 1:n.pipes=[n.pipes,e];break;default:n.pipes.push(e)}n.pipesCount+=1,l("pipe count=%d opts=%j",n.pipesCount,t);var a=(!t||!1!==t.end)&&e!==o.stdout&&e!==o.stderr?u:y;function s(t,o){l("onunpipe"),t===r&&o&&!1===o.hasUnpiped&&(o.hasUnpiped=!0,l("cleanup"),e.removeListener("close",p),e.removeListener("finish",b),e.removeListener("drain",c),e.removeListener("error",h),e.removeListener("unpipe",s),r.removeListener("end",u),r.removeListener("end",y),r.removeListener("data",f),d=!0,!n.awaitDrain||e._writableState&&!e._writableState.needDrain||c())}function u(){l("onend"),e.end()}n.endEmitted?o.nextTick(a):r.once("end",a),e.on("unpipe",s);var c=function(e){return function(){var t=e._readableState;l("pipeOnDrain",t.awaitDrain),t.awaitDrain&&t.awaitDrain--,0===t.awaitDrain&&i(e,"data")&&(t.flowing=!0,I(e))}}(r);e.on("drain",c);var d=!1;function f(t){l("ondata");var 
o=e.write(t);l("dest.write",o),!1===o&&((1===n.pipesCount&&n.pipes===e||n.pipesCount>1&&-1!==B(n.pipes,e))&&!d&&(l("false write response, pause",n.awaitDrain),n.awaitDrain++),r.pause())}function h(t){l("onerror",t),y(),e.removeListener("error",h),0===i(e,"error")&&R(e,t)}function p(){e.removeListener("finish",b),y()}function b(){l("onfinish"),e.removeListener("close",p),y()}function y(){l("unpipe"),r.unpipe(e)}return r.on("data",f),function(e,t,r){if("function"===typeof e.prependListener)return e.prependListener(t,r);e._events&&e._events[t]?Array.isArray(e._events[t])?e._events[t].unshift(r):e._events[t]=[r,e._events[t]]:e.on(t,r)}(e,"error",h),e.once("close",p),e.once("finish",b),e.emit("pipe",r),n.flowing||(l("pipe resume"),r.resume()),e},T.prototype.unpipe=function(e){var t=this._readableState,r={hasUnpiped:!1};if(0===t.pipesCount)return this;if(1===t.pipesCount)return e&&e!==t.pipes||(e||(e=t.pipes),t.pipes=null,t.pipesCount=0,t.flowing=!1,e&&e.emit("unpipe",this,r)),this;if(!e){var n=t.pipes,o=t.pipesCount;t.pipes=null,t.pipesCount=0,t.flowing=!1;for(var i=0;i<o;i++)n[i].emit("unpipe",this,{hasUnpiped:!1});return this}var a=B(t.pipes,e);return-1===a||(t.pipes.splice(a,1),t.pipesCount-=1,1===t.pipesCount&&(t.pipes=t.pipes[0]),e.emit("unpipe",this,r)),this},T.prototype.on=function(e,t){var r=a.prototype.on.call(this,e,t),n=this._readableState;return"data"===e?(n.readableListening=this.listenerCount("readable")>0,!1!==n.flowing&&this.resume()):"readable"===e&&(n.endEmitted||n.readableListening||(n.readableListening=n.needReadable=!0,n.flowing=!1,n.emittedReadable=!1,l("on readable",n.length,n.reading),n.length?A(this):n.reading||o.nextTick(D,this))),r},T.prototype.addListener=T.prototype.on,T.prototype.removeListener=function(e,t){var r=a.prototype.removeListener.call(this,e,t);return"readable"===e&&o.nextTick(j,this),r},T.prototype.removeAllListeners=function(e){var t=a.prototype.removeAllListeners.apply(this,arguments);return"readable"!==e&&void 0!==e||o.nextTick(j,this),t},T.prototype.resume=function(){var e=this._readableState;return e.flowing||(l("resume"),e.flowing=!e.readableListening,function(e,t){t.resumeScheduled||(t.resumeScheduled=!0,o.nextTick(N,e,t))}(this,e)),e.paused=!1,this},T.prototype.pause=function(){return l("call pause flowing=%j",this._readableState.flowing),!1!==this._readableState.flowing&&(l("pause"),this._readableState.flowing=!1,this.emit("pause")),this._readableState.paused=!0,this},T.prototype.wrap=function(e){var t=this,r=this._readableState,n=!1;for(var o in e.on("end",(function(){if(l("wrapped end"),r.decoder&&!r.ended){var e=r.decoder.end();e&&e.length&&t.push(e)}t.push(null)})),e.on("data",(function(o){(l("wrapped data"),r.decoder&&(o=r.decoder.write(o)),!r.objectMode||null!==o&&void 0!==o)&&((r.objectMode||o&&o.length)&&(t.push(o)||(n=!0,e.pause())))})),e)void 0===this[o]&&"function"===typeof e[o]&&(this[o]=function(t){return function(){return e[t].apply(e,arguments)}}(o));for(var i=0;i<S.length;i++)e.on(S[i],this.emit.bind(this,S[i]));return this._read=function(t){l("wrapped _read",t),n&&(n=!1,e.resume())},this},"function"===typeof Symbol&&(T.prototype[Symbol.asyncIterator]=function(){return void 0===f&&(f=r(13793)),f(this)}),Object.defineProperty(T.prototype,"readableHighWaterMark",{enumerable:!1,get:function(){return this._readableState.highWaterMark}}),Object.defineProperty(T.prototype,"readableBuffer",{enumerable:!1,get:function(){return 
this._readableState&&this._readableState.buffer}}),Object.defineProperty(T.prototype,"readableFlowing",{enumerable:!1,get:function(){return this._readableState.flowing},set:function(e){this._readableState&&(this._readableState.flowing=e)}}),T._fromList=U,Object.defineProperty(T.prototype,"readableLength",{enumerable:!1,get:function(){return this._readableState.length}}),"function"===typeof Symbol&&(T.from=function(e,t){return void 0===h&&(h=r(89235)),h(T,e,t)})},77863:function(e,t,r){"use strict";e.exports=c;var n=r(96308).q,o=n.ERR_METHOD_NOT_IMPLEMENTED,i=n.ERR_MULTIPLE_CALLBACK,a=n.ERR_TRANSFORM_ALREADY_TRANSFORMING,s=n.ERR_TRANSFORM_WITH_LENGTH_0,u=r(13168);function l(e,t){var r=this._transformState;r.transforming=!1;var n=r.writecb;if(null===n)return this.emit("error",new i);r.writechunk=null,r.writecb=null,null!=t&&this.push(t),n(e);var o=this._readableState;o.reading=!1,(o.needReadable||o.length<o.highWaterMark)&&this._read(o.highWaterMark)}function c(e){if(!(this instanceof c))return new c(e);u.call(this,e),this._transformState={afterTransform:l.bind(this),needTransform:!1,transforming:!1,writecb:null,writechunk:null,writeencoding:null},this._readableState.needReadable=!0,this._readableState.sync=!1,e&&("function"===typeof e.transform&&(this._transform=e.transform),"function"===typeof e.flush&&(this._flush=e.flush)),this.on("prefinish",d)}function d(){var e=this;"function"!==typeof this._flush||this._readableState.destroyed?f(this,null,null):this._flush((function(t,r){f(e,t,r)}))}function f(e,t,r){if(t)return e.emit("error",t);if(null!=r&&e.push(r),e._writableState.length)throw new s;if(e._transformState.transforming)throw new a;return e.push(null)}r(91285)(c,u),c.prototype.push=function(e,t){return this._transformState.needTransform=!1,u.prototype.push.call(this,e,t)},c.prototype._transform=function(e,t,r){r(new o("_transform()"))},c.prototype._write=function(e,t,r){var n=this._transformState;if(n.writecb=r,n.writechunk=e,n.writeencoding=t,!n.transforming){var o=this._readableState;(n.needTransform||o.needReadable||o.length<o.highWaterMark)&&this._read(o.highWaterMark)}},c.prototype._read=function(e){var t=this._transformState;null===t.writechunk||t.transforming?t.needTransform=!0:(t.transforming=!0,this._transform(t.writechunk,t.writeencoding,t.afterTransform))},c.prototype._destroy=function(e,t){u.prototype._destroy.call(this,e,(function(e){t(e)}))}},70437:function(e,t,r){"use strict";var n,o=r(34406);function i(e){var t=this;this.next=null,this.entry=null,this.finish=function(){!function(e,t,r){var n=e.entry;e.entry=null;for(;n;){var o=n.callback;t.pendingcb--,o(r),n=n.next}t.corkedRequestsFree.next=e}(t,e)}}e.exports=T,T.WritableState=E;var a={deprecate:r(5803)},s=r(62233),u=r(48834).Buffer,l=r.g.Uint8Array||function(){};var c,d=r(37612),f=r(57241).getHighWaterMark,h=r(96308).q,p=h.ERR_INVALID_ARG_TYPE,b=h.ERR_METHOD_NOT_IMPLEMENTED,y=h.ERR_MULTIPLE_CALLBACK,g=h.ERR_STREAM_CANNOT_PIPE,m=h.ERR_STREAM_DESTROYED,_=h.ERR_STREAM_NULL_VALUES,v=h.ERR_STREAM_WRITE_AFTER_END,w=h.ERR_UNKNOWN_ENCODING,R=d.errorOrDestroy;function S(){}function E(e,t,a){n=n||r(13168),e=e||{},"boolean"!==typeof a&&(a=t instanceof n),this.objectMode=!!e.objectMode,a&&(this.objectMode=this.objectMode||!!e.writableObjectMode),this.highWaterMark=f(this,e,"writableHighWaterMark",a),this.finalCalled=!1,this.needDrain=!1,this.ending=!1,this.ended=!1,this.finished=!1,this.destroyed=!1;var 
s=!1===e.decodeStrings;this.decodeStrings=!s,this.defaultEncoding=e.defaultEncoding||"utf8",this.length=0,this.writing=!1,this.corked=0,this.sync=!0,this.bufferProcessing=!1,this.onwrite=function(e){!function(e,t){var r=e._writableState,n=r.sync,i=r.writecb;if("function"!==typeof i)throw new y;if(function(e){e.writing=!1,e.writecb=null,e.length-=e.writelen,e.writelen=0}(r),t)!function(e,t,r,n,i){--t.pendingcb,r?(o.nextTick(i,n),o.nextTick(x,e,t),e._writableState.errorEmitted=!0,R(e,n)):(i(n),e._writableState.errorEmitted=!0,R(e,n),x(e,t))}(e,r,n,t,i);else{var a=C(r)||e.destroyed;a||r.corked||r.bufferProcessing||!r.bufferedRequest||O(e,r),n?o.nextTick(M,e,r,a,i):M(e,r,a,i)}}(t,e)},this.writecb=null,this.writelen=0,this.bufferedRequest=null,this.lastBufferedRequest=null,this.pendingcb=0,this.prefinished=!1,this.errorEmitted=!1,this.emitClose=!1!==e.emitClose,this.autoDestroy=!!e.autoDestroy,this.bufferedRequestCount=0,this.corkedRequestsFree=new i(this)}function T(e){var t=this instanceof(n=n||r(13168));if(!t&&!c.call(T,this))return new T(e);this._writableState=new E(e,this,t),this.writable=!0,e&&("function"===typeof e.write&&(this._write=e.write),"function"===typeof e.writev&&(this._writev=e.writev),"function"===typeof e.destroy&&(this._destroy=e.destroy),"function"===typeof e.final&&(this._final=e.final)),s.call(this)}function k(e,t,r,n,o,i,a){t.writelen=n,t.writecb=a,t.writing=!0,t.sync=!0,t.destroyed?t.onwrite(new m("write")):r?e._writev(o,t.onwrite):e._write(o,i,t.onwrite),t.sync=!1}function M(e,t,r,n){r||function(e,t){0===t.length&&t.needDrain&&(t.needDrain=!1,e.emit("drain"))}(e,t),t.pendingcb--,n(),x(e,t)}function O(e,t){t.bufferProcessing=!0;var r=t.bufferedRequest;if(e._writev&&r&&r.next){var n=t.bufferedRequestCount,o=new Array(n),a=t.corkedRequestsFree;a.entry=r;for(var s=0,u=!0;r;)o[s]=r,r.isBuf||(u=!1),r=r.next,s+=1;o.allBuffers=u,k(e,t,!0,t.length,o,"",a.finish),t.pendingcb++,t.lastBufferedRequest=null,a.next?(t.corkedRequestsFree=a.next,a.next=null):t.corkedRequestsFree=new i(t),t.bufferedRequestCount=0}else{for(;r;){var l=r.chunk,c=r.encoding,d=r.callback;if(k(e,t,!1,t.objectMode?1:l.length,l,c,d),r=r.next,t.bufferedRequestCount--,t.writing)break}null===r&&(t.lastBufferedRequest=null)}t.bufferedRequest=r,t.bufferProcessing=!1}function C(e){return e.ending&&0===e.length&&null===e.bufferedRequest&&!e.finished&&!e.writing}function A(e,t){e._final((function(r){t.pendingcb--,r&&R(e,r),t.prefinished=!0,e.emit("prefinish"),x(e,t)}))}function x(e,t){var r=C(t);if(r&&(function(e,t){t.prefinished||t.finalCalled||("function"!==typeof e._final||t.destroyed?(t.prefinished=!0,e.emit("prefinish")):(t.pendingcb++,t.finalCalled=!0,o.nextTick(A,e,t)))}(e,t),0===t.pendingcb&&(t.finished=!0,e.emit("finish"),t.autoDestroy))){var n=e._readableState;(!n||n.autoDestroy&&n.endEmitted)&&e.destroy()}return r}r(91285)(T,s),E.prototype.getBuffer=function(){for(var e=this.bufferedRequest,t=[];e;)t.push(e),e=e.next;return t},function(){try{Object.defineProperty(E.prototype,"buffer",{get:a.deprecate((function(){return this.getBuffer()}),"_writableState.buffer is deprecated. 
Use _writableState.getBuffer instead.","DEP0003")})}catch(e){}}(),"function"===typeof Symbol&&Symbol.hasInstance&&"function"===typeof Function.prototype[Symbol.hasInstance]?(c=Function.prototype[Symbol.hasInstance],Object.defineProperty(T,Symbol.hasInstance,{value:function(e){return!!c.call(this,e)||this===T&&(e&&e._writableState instanceof E)}})):c=function(e){return e instanceof this},T.prototype.pipe=function(){R(this,new g)},T.prototype.write=function(e,t,r){var n,i=this._writableState,a=!1,s=!i.objectMode&&(n=e,u.isBuffer(n)||n instanceof l);return s&&!u.isBuffer(e)&&(e=function(e){return u.from(e)}(e)),"function"===typeof t&&(r=t,t=null),s?t="buffer":t||(t=i.defaultEncoding),"function"!==typeof r&&(r=S),i.ending?function(e,t){var r=new v;R(e,r),o.nextTick(t,r)}(this,r):(s||function(e,t,r,n){var i;return null===r?i=new _:"string"===typeof r||t.objectMode||(i=new p("chunk",["string","Buffer"],r)),!i||(R(e,i),o.nextTick(n,i),!1)}(this,i,e,r))&&(i.pendingcb++,a=function(e,t,r,n,o,i){if(!r){var a=function(e,t,r){e.objectMode||!1===e.decodeStrings||"string"!==typeof t||(t=u.from(t,r));return t}(t,n,o);n!==a&&(r=!0,o="buffer",n=a)}var s=t.objectMode?1:n.length;t.length+=s;var l=t.length<t.highWaterMark;l||(t.needDrain=!0);if(t.writing||t.corked){var c=t.lastBufferedRequest;t.lastBufferedRequest={chunk:n,encoding:o,isBuf:r,callback:i,next:null},c?c.next=t.lastBufferedRequest:t.bufferedRequest=t.lastBufferedRequest,t.bufferedRequestCount+=1}else k(e,t,!1,s,n,o,i);return l}(this,i,s,e,t,r)),a},T.prototype.cork=function(){this._writableState.corked++},T.prototype.uncork=function(){var e=this._writableState;e.corked&&(e.corked--,e.writing||e.corked||e.bufferProcessing||!e.bufferedRequest||O(this,e))},T.prototype.setDefaultEncoding=function(e){if("string"===typeof e&&(e=e.toLowerCase()),!(["hex","utf8","utf-8","ascii","binary","base64","ucs2","ucs-2","utf16le","utf-16le","raw"].indexOf((e+"").toLowerCase())>-1))throw new w(e);return this._writableState.defaultEncoding=e,this},Object.defineProperty(T.prototype,"writableBuffer",{enumerable:!1,get:function(){return this._writableState&&this._writableState.getBuffer()}}),Object.defineProperty(T.prototype,"writableHighWaterMark",{enumerable:!1,get:function(){return this._writableState.highWaterMark}}),T.prototype._write=function(e,t,r){r(new b("_write()"))},T.prototype._writev=null,T.prototype.end=function(e,t,r){var n=this._writableState;return"function"===typeof e?(r=e,e=null,t=null):"function"===typeof t&&(r=t,t=null),null!==e&&void 0!==e&&this.write(e,t),n.corked&&(n.corked=1,this.uncork()),n.ending||function(e,t,r){t.ending=!0,x(e,t),r&&(t.finished?o.nextTick(r):e.once("finish",r));t.ended=!0,e.writable=!1}(this,n,r),this},Object.defineProperty(T.prototype,"writableLength",{enumerable:!1,get:function(){return this._writableState.length}}),Object.defineProperty(T.prototype,"destroyed",{enumerable:!1,get:function(){return void 0!==this._writableState&&this._writableState.destroyed},set:function(e){this._writableState&&(this._writableState.destroyed=e)}}),T.prototype.destroy=d.destroy,T.prototype._undestroy=d.undestroy,T.prototype._destroy=function(e,t){t(e)}},13793:function(e,t,r){"use strict";var n,o=r(34406);function i(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}var a=r(92100),s=Symbol("lastResolve"),u=Symbol("lastReject"),l=Symbol("error"),c=Symbol("ended"),d=Symbol("lastPromise"),f=Symbol("handlePromise"),h=Symbol("stream");function p(e,t){return{value:e,done:t}}function b(e){var 
t=e[s];if(null!==t){var r=e[h].read();null!==r&&(e[d]=null,e[s]=null,e[u]=null,t(p(r,!1)))}}function y(e){o.nextTick(b,e)}var g=Object.getPrototypeOf((function(){})),m=Object.setPrototypeOf((i(n={get stream(){return this[h]},next:function(){var e=this,t=this[l];if(null!==t)return Promise.reject(t);if(this[c])return Promise.resolve(p(void 0,!0));if(this[h].destroyed)return new Promise((function(t,r){o.nextTick((function(){e[l]?r(e[l]):t(p(void 0,!0))}))}));var r,n=this[d];if(n)r=new Promise(function(e,t){return function(r,n){e.then((function(){t[c]?r(p(void 0,!0)):t[f](r,n)}),n)}}(n,this));else{var i=this[h].read();if(null!==i)return Promise.resolve(p(i,!1));r=new Promise(this[f])}return this[d]=r,r}},Symbol.asyncIterator,(function(){return this})),i(n,"return",(function(){var e=this;return new Promise((function(t,r){e[h].destroy(null,(function(e){e?r(e):t(p(void 0,!0))}))}))})),n),g);e.exports=function(e){var t,r=Object.create(m,(i(t={},h,{value:e,writable:!0}),i(t,s,{value:null,writable:!0}),i(t,u,{value:null,writable:!0}),i(t,l,{value:null,writable:!0}),i(t,c,{value:e._readableState.endEmitted,writable:!0}),i(t,f,{value:function(e,t){var n=r[h].read();n?(r[d]=null,r[s]=null,r[u]=null,e(p(n,!1))):(r[s]=e,r[u]=t)},writable:!0}),t));return r[d]=null,a(e,(function(e){if(e&&"ERR_STREAM_PREMATURE_CLOSE"!==e.code){var t=r[u];return null!==t&&(r[d]=null,r[s]=null,r[u]=null,t(e)),void(r[l]=e)}var n=r[s];null!==n&&(r[d]=null,r[s]=null,r[u]=null,n(p(void 0,!0))),r[c]=!0})),e.on("readable",y.bind(null,r)),r}},30711:function(e,t,r){"use strict";function n(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function i(e,t){for(var r=0;r<t.length;r++){var n=t[r];n.enumerable=n.enumerable||!1,n.configurable=!0,"value"in n&&(n.writable=!0),Object.defineProperty(e,n.key,n)}}var a=r(48834).Buffer,s=r(43194).inspect,u=s&&s.custom||"inspect";e.exports=function(){function e(){!function(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}(this,e),this.head=null,this.tail=null,this.length=0}var t,r,l;return t=e,(r=[{key:"push",value:function(e){var t={data:e,next:null};this.length>0?this.tail.next=t:this.head=t,this.tail=t,++this.length}},{key:"unshift",value:function(e){var t={data:e,next:this.head};0===this.length&&(this.tail=t),this.head=t,++this.length}},{key:"shift",value:function(){if(0!==this.length){var e=this.head.data;return 1===this.length?this.head=this.tail=null:this.head=this.head.next,--this.length,e}}},{key:"clear",value:function(){this.head=this.tail=null,this.length=0}},{key:"join",value:function(e){if(0===this.length)return"";for(var t=this.head,r=""+t.data;t=t.next;)r+=e+t.data;return r}},{key:"concat",value:function(e){if(0===this.length)return a.alloc(0);for(var t,r,n,o=a.allocUnsafe(e>>>0),i=this.head,s=0;i;)t=i.data,r=o,n=s,a.prototype.copy.call(t,r,n),s+=i.data.length,i=i.next;return o}},{key:"consume",value:function(e,t){var r;return e<this.head.data.length?(r=this.head.data.slice(0,e),this.head.data=this.head.data.slice(e)):r=e===this.head.data.length?this.shift():t?this._getString(e):this._getBuffer(e),r}},{key:"first",value:function(){return this.head.data}},{key:"_getString",value:function(e){var t=this.head,r=1,n=t.data;for(e-=n.length;t=t.next;){var 
o=t.data,i=e>o.length?o.length:e;if(i===o.length?n+=o:n+=o.slice(0,e),0===(e-=i)){i===o.length?(++r,t.next?this.head=t.next:this.head=this.tail=null):(this.head=t,t.data=o.slice(i));break}++r}return this.length-=r,n}},{key:"_getBuffer",value:function(e){var t=a.allocUnsafe(e),r=this.head,n=1;for(r.data.copy(t),e-=r.data.length;r=r.next;){var o=r.data,i=e>o.length?o.length:e;if(o.copy(t,t.length-e,0,i),0===(e-=i)){i===o.length?(++n,r.next?this.head=r.next:this.head=this.tail=null):(this.head=r,r.data=o.slice(i));break}++n}return this.length-=n,t}},{key:u,value:function(e,t){return s(this,function(e){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{};t%2?n(Object(r),!0).forEach((function(t){o(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):n(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}({},t,{depth:0,customInspect:!1}))}}])&&i(t.prototype,r),l&&i(t,l),e}()},37612:function(e,t,r){"use strict";var n=r(34406);function o(e,t){a(e,t),i(e)}function i(e){e._writableState&&!e._writableState.emitClose||e._readableState&&!e._readableState.emitClose||e.emit("close")}function a(e,t){e.emit("error",t)}e.exports={destroy:function(e,t){var r=this,s=this._readableState&&this._readableState.destroyed,u=this._writableState&&this._writableState.destroyed;return s||u?(t?t(e):e&&(this._writableState?this._writableState.errorEmitted||(this._writableState.errorEmitted=!0,n.nextTick(a,this,e)):n.nextTick(a,this,e)),this):(this._readableState&&(this._readableState.destroyed=!0),this._writableState&&(this._writableState.destroyed=!0),this._destroy(e||null,(function(e){!t&&e?r._writableState?r._writableState.errorEmitted?n.nextTick(i,r):(r._writableState.errorEmitted=!0,n.nextTick(o,r,e)):n.nextTick(o,r,e):t?(n.nextTick(i,r),t(e)):n.nextTick(i,r)})),this)},undestroy:function(){this._readableState&&(this._readableState.destroyed=!1,this._readableState.reading=!1,this._readableState.ended=!1,this._readableState.endEmitted=!1),this._writableState&&(this._writableState.destroyed=!1,this._writableState.ended=!1,this._writableState.ending=!1,this._writableState.finalCalled=!1,this._writableState.prefinished=!1,this._writableState.finished=!1,this._writableState.errorEmitted=!1)},errorOrDestroy:function(e,t){var r=e._readableState,n=e._writableState;r&&r.autoDestroy||n&&n.autoDestroy?e.destroy(t):e.emit("error",t)}}},92100:function(e,t,r){"use strict";var n=r(96308).q.ERR_STREAM_PREMATURE_CLOSE;function o(){}e.exports=function e(t,r,i){if("function"===typeof r)return e(t,null,r);r||(r={}),i=function(e){var t=!1;return function(){if(!t){t=!0;for(var r=arguments.length,n=new Array(r),o=0;o<r;o++)n[o]=arguments[o];e.apply(this,n)}}}(i||o);var a=r.readable||!1!==r.readable&&t.readable,s=r.writable||!1!==r.writable&&t.writable,u=function(){t.writable||c()},l=t._writableState&&t._writableState.finished,c=function(){s=!1,l=!0,a||i.call(t)},d=t._readableState&&t._readableState.endEmitted,f=function(){a=!1,d=!0,s||i.call(t)},h=function(e){i.call(t,e)},p=function(){var e;return a&&!d?(t._readableState&&t._readableState.ended||(e=new n),i.call(t,e)):s&&!l?(t._writableState&&t._writableState.ended||(e=new n),i.call(t,e)):void 0},b=function(){t.req.on("finish",c)};return!function(e){return e.setHeader&&"function"===typeof 
e.abort}(t)?s&&!t._writableState&&(t.on("end",u),t.on("close",u)):(t.on("complete",c),t.on("abort",p),t.req?b():t.on("request",b)),t.on("end",f),t.on("finish",c),!1!==r.error&&t.on("error",h),t.on("close",p),function(){t.removeListener("complete",c),t.removeListener("abort",p),t.removeListener("request",b),t.req&&t.req.removeListener("finish",c),t.removeListener("end",u),t.removeListener("close",u),t.removeListener("finish",c),t.removeListener("end",f),t.removeListener("error",h),t.removeListener("close",p)}}},89235:function(e){e.exports=function(){throw new Error("Readable.from is not available in the browser")}},23145:function(e,t,r){"use strict";var n;var o=r(96308).q,i=o.ERR_MISSING_ARGS,a=o.ERR_STREAM_DESTROYED;function s(e){if(e)throw e}function u(e,t,o,i){i=function(e){var t=!1;return function(){t||(t=!0,e.apply(void 0,arguments))}}(i);var s=!1;e.on("close",(function(){s=!0})),void 0===n&&(n=r(92100)),n(e,{readable:t,writable:o},(function(e){if(e)return i(e);s=!0,i()}));var u=!1;return function(t){if(!s&&!u)return u=!0,function(e){return e.setHeader&&"function"===typeof e.abort}(e)?e.abort():"function"===typeof e.destroy?e.destroy():void i(t||new a("pipe"))}}function l(e){e()}function c(e,t){return e.pipe(t)}function d(e){return e.length?"function"!==typeof e[e.length-1]?s:e.pop():s}e.exports=function(){for(var e=arguments.length,t=new Array(e),r=0;r<e;r++)t[r]=arguments[r];var n,o=d(t);if(Array.isArray(t[0])&&(t=t[0]),t.length<2)throw new i("streams");var a=t.map((function(e,r){var i=r<t.length-1;return u(e,i,r>0,(function(e){n||(n=e),e&&a.forEach(l),i||(a.forEach(l),o(n))}))}));return t.reduce(c)}},57241:function(e,t,r){"use strict";var n=r(96308).q.ERR_INVALID_OPT_VALUE;e.exports={getHighWaterMark:function(e,t,r,o){var i=function(e,t,r){return null!=e.highWaterMark?e.highWaterMark:t?e[r]:null}(t,o,r);if(null!=i){if(!isFinite(i)||Math.floor(i)!==i||i<0)throw new n(o?r:"highWaterMark",i);return Math.floor(i)}return e.objectMode?16:16384}}},62233:function(e,t,r){e.exports=r(22699).EventEmitter},27669:function(e,t,r){(t=e.exports=r(41975)).Stream=t,t.Readable=t,t.Writable=r(70437),t.Duplex=r(13168),t.Transform=r(77863),t.PassThrough=r(584),t.finished=r(92100),t.pipeline=r(23145)},51960:function(e){e.exports=function(){for(var e={},r=0;r<arguments.length;r++){var n=arguments[r];for(var o in n)t.call(n,o)&&(e[o]=n[o])}return e};var t=Object.prototype.hasOwnProperty}}]); | D |
exchange_two_nos.py | #1
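# Goal: exchange the two values using only addition and subtraction (no temporary variable).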
first = int(input("Enter the value of the first number : "))
second = int(input("Enter the value of the second number : "))
#2
first = first + second
#3 | first = first - second
#5
print("After exchange, First number is : ",first," Second number is : ",second) | second = first - second
#4 |
utils.py | from dublinbus.serializers import UserSerializer
def my_jwt_response_handler(token, user=None, request=None):
| ''' JWT response handler
Returns the user's serialized data with the newly generated JWT added under a 'token' field
'''
response = UserSerializer(user, context={'request': request}).data
response["token"] = token
return response
|
|
Recoil_Snapshot.js | /**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @emails oncall+recoil
* @flow strict-local
* @format
*/
'use strict';
import type {Loadable} from '../adt/Recoil_Loadable';
import type {
ResetRecoilState,
SetRecoilState,
ValueOrUpdater,
} from '../recoil_values/Recoil_callbackTypes';
import type {RecoilValueInfo} from './Recoil_FunctionalCore';
import type {NodeKey} from './Recoil_Keys';
import type {RecoilState, RecoilValue} from './Recoil_RecoilValue';
import type {StateID, Store, StoreState, TreeState} from './Recoil_State'; | const {isSSR} = require('../util/Recoil_Environment');
const err = require('../util/Recoil_err');
const filterIterable = require('../util/Recoil_filterIterable');
const gkx = require('../util/Recoil_gkx');
const nullthrows = require('../util/Recoil_nullthrows');
const recoverableViolation = require('../util/Recoil_recoverableViolation');
const {batchUpdates} = require('./Recoil_Batching');
const {
initializeNodeIfNewToStore,
peekNodeInfo,
} = require('./Recoil_FunctionalCore');
const {graph} = require('./Recoil_Graph');
const {
DEFAULT_VALUE,
recoilValues,
recoilValuesForKeys,
} = require('./Recoil_Node');
const {
AbstractRecoilValue,
getRecoilValueAsLoadable,
setRecoilValue,
setUnvalidatedRecoilValue,
} = require('./Recoil_RecoilValueInterface');
const {updateRetainCount} = require('./Recoil_Retention');
const {
getNextTreeStateVersion,
makeEmptyStoreState,
} = require('./Recoil_State');
// Opaque at this surface because it's part of the public API from here.
export opaque type SnapshotID = StateID;
const retainWarning = `
Recoil Snapshots only last for the duration of the callback they are provided to. To keep a Snapshot longer, do this:
const release = snapshot.retain();
try {
await useTheSnapshotAsynchronously(snapshot);
} finally {
release();
}
This is currently a DEV-only warning but will become a thrown exception in the next release of Recoil.
`;
// A "Snapshot" is "read-only" and captures a specific set of values of atoms.
// However, the data-flow-graph and selector values may evolve as selector
// evaluation functions are executed and async selectors resolve.
class Snapshot {
_store: Store;
_refCount: number = 0;
constructor(storeState: StoreState) {
this._store = {
getState: () => storeState,
replaceState: replacer => {
storeState.currentTree = replacer(storeState.currentTree); // no batching so nextTree is never active
},
getGraph: version => {
const graphs = storeState.graphsByVersion;
if (graphs.has(version)) {
return nullthrows(graphs.get(version));
}
const newGraph = graph();
graphs.set(version, newGraph);
return newGraph;
},
subscribeToTransactions: () => ({release: () => {}}),
addTransactionMetadata: () => {
throw err('Cannot subscribe to Snapshots');
},
};
// Initialize any nodes that are live in the parent store (primarily so that this
// snapshot gets counted towards the node's live stores count).
for (const nodeKey of this._store.getState().nodeCleanupFunctions.keys()) {
initializeNodeIfNewToStore(
this._store,
storeState.currentTree,
nodeKey,
'get',
);
updateRetainCount(this._store, nodeKey, 1);
}
this.retain();
this.autorelease_INTERNAL();
}
retain(): () => void {
if (!gkx('recoil_memory_managament_2020')) {
return () => undefined;
}
this._refCount++;
let released = false;
return () => {
if (!released) {
released = true;
this.release_INTERNAL();
}
};
}
autorelease_INTERNAL(): void {
if (!gkx('recoil_memory_managament_2020')) {
return;
}
if (!isSSR) {
window.setTimeout(() => this.release_INTERNAL(), 0);
}
}
release_INTERNAL(): void {
if (!gkx('recoil_memory_managament_2020')) {
return;
}
this._refCount--;
if (this._refCount === 0) {
// Temporarily nerfing this to allow us to find broken call sites without
// actually breaking anybody yet.
// for (const k of this._store.getState().nodeCleanupFunctions.keys()) {
// updateRetainCountToZero(this._store, k);
// }
}
}
checkRefCount_INTERNAL(): void {
if (gkx('recoil_memory_managament_2020') && this._refCount <= 0) {
if (__DEV__) {
recoverableViolation(retainWarning, 'recoil');
}
// What we will ship later:
// throw err(retainWarning);
}
}
getStore_INTERNAL(): Store {
this.checkRefCount_INTERNAL();
return this._store;
}
getID(): SnapshotID {
this.checkRefCount_INTERNAL();
return this.getID_INTERNAL();
}
getID_INTERNAL(): StateID {
this.checkRefCount_INTERNAL();
return this._store.getState().currentTree.stateID;
}
// We want to allow the methods to be destructured and used as accessors
// eslint-disable-next-line fb-www/extra-arrow-initializer
getLoadable: <T>(RecoilValue<T>) => Loadable<T> = <T>(
recoilValue: RecoilValue<T>,
): Loadable<T> => {
this.checkRefCount_INTERNAL();
return getRecoilValueAsLoadable(this._store, recoilValue);
};
// We want to allow the methods to be destructured and used as accessors
// eslint-disable-next-line fb-www/extra-arrow-initializer
getPromise: <T>(RecoilValue<T>) => Promise<T> = <T>(
recoilValue: RecoilValue<T>,
): Promise<T> => {
this.checkRefCount_INTERNAL();
return this.getLoadable(recoilValue).toPromise();
};
// We want to allow the methods to be destructured and used as accessors
// eslint-disable-next-line fb-www/extra-arrow-initializer
getNodes_UNSTABLE: (
{
isModified?: boolean,
isInitialized?: boolean,
} | void,
) => Iterable<RecoilValue<mixed>> = opt => {
this.checkRefCount_INTERNAL();
// TODO Deal with modified selectors
if (opt?.isModified === true) {
if (opt?.isInitialized === false) {
return [];
}
const state = this._store.getState().currentTree;
return recoilValuesForKeys(state.dirtyAtoms);
}
const knownAtoms = this._store.getState().knownAtoms;
const knownSelectors = this._store.getState().knownSelectors;
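    // opt.isInitialized unset: report every registered recoil value;
    // opt.isInitialized === true: only values already initialized in this store (known atoms/selectors);
    // opt.isInitialized === false: registered values this store has not initialized yet.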
return opt?.isInitialized == null
? recoilValues.values()
: opt.isInitialized === true
? recoilValuesForKeys(
concatIterables([
this._store.getState().knownAtoms,
this._store.getState().knownSelectors,
]),
)
: filterIterable(
recoilValues.values(),
({key}) => !knownAtoms.has(key) && !knownSelectors.has(key),
);
};
// Report the current status of a node.
// This peeks the current state and does not affect the snapshot state at all
// eslint-disable-next-line fb-www/extra-arrow-initializer
getInfo_UNSTABLE: <T>(RecoilValue<T>) => RecoilValueInfo<T> = <T>({
key,
}: RecoilValue<T>) => {
this.checkRefCount_INTERNAL();
return peekNodeInfo(this._store, this._store.getState().currentTree, key);
};
// eslint-disable-next-line fb-www/extra-arrow-initializer
map: ((MutableSnapshot) => void) => Snapshot = mapper => {
this.checkRefCount_INTERNAL();
const mutableSnapshot = new MutableSnapshot(this, batchUpdates);
mapper(mutableSnapshot); // if removing batchUpdates from `set` add it here
return cloneSnapshot(mutableSnapshot.getStore_INTERNAL());
};
// eslint-disable-next-line fb-www/extra-arrow-initializer
asyncMap: ((MutableSnapshot) => Promise<void>) => Promise<Snapshot> =
async mapper => {
this.checkRefCount_INTERNAL();
const mutableSnapshot = new MutableSnapshot(this, batchUpdates);
await mapper(mutableSnapshot);
return cloneSnapshot(mutableSnapshot.getStore_INTERNAL());
};
}
function cloneStoreState(
store: Store,
treeState: TreeState,
bumpVersion: boolean = false,
): StoreState {
const storeState = store.getState();
const version = bumpVersion ? getNextTreeStateVersion() : treeState.version;
return {
currentTree: bumpVersion
? {
// TODO snapshots shouldn't really have versions because a new version number
// is always assigned when the snapshot is gone to.
version,
stateID: version,
transactionMetadata: {...treeState.transactionMetadata},
dirtyAtoms: new Set(treeState.dirtyAtoms),
atomValues: treeState.atomValues.clone(),
nonvalidatedAtoms: treeState.nonvalidatedAtoms.clone(),
}
: treeState,
commitDepth: 0,
nextTree: null,
previousTree: null,
knownAtoms: new Set(storeState.knownAtoms), // FIXME here's a copy
knownSelectors: new Set(storeState.knownSelectors), // FIXME here's a copy
transactionSubscriptions: new Map(),
nodeTransactionSubscriptions: new Map(),
nodeToComponentSubscriptions: new Map(),
queuedComponentCallbacks_DEPRECATED: [],
suspendedComponentResolvers: new Set(),
graphsByVersion: new Map().set(version, store.getGraph(treeState.version)),
versionsUsedByComponent: new Map(),
retention: {
referenceCounts: new Map(),
nodesRetainedByZone: new Map(),
retainablesToCheckForRelease: new Set(),
},
nodeCleanupFunctions: new Map(),
};
}
// Factory to build a fresh snapshot
function freshSnapshot(initializeState?: MutableSnapshot => void): Snapshot {
const snapshot = new Snapshot(makeEmptyStoreState());
return initializeState != null ? snapshot.map(initializeState) : snapshot;
}
// Factory to clone a snapshot state
function cloneSnapshot(
store: Store,
version: 'current' | 'previous' = 'current',
): Snapshot {
const storeState = store.getState();
const treeState =
version === 'current'
? storeState.currentTree
: nullthrows(storeState.previousTree);
return new Snapshot(cloneStoreState(store, treeState));
}
class MutableSnapshot extends Snapshot {
_batch: (() => void) => void;
constructor(snapshot: Snapshot, batch: (() => void) => void) {
super(
cloneStoreState(
snapshot.getStore_INTERNAL(),
snapshot.getStore_INTERNAL().getState().currentTree,
true,
),
);
this._batch = batch;
}
// We want to allow the methods to be destructured and used as accessors
// eslint-disable-next-line fb-www/extra-arrow-initializer
set: SetRecoilState = <T>(
recoilState: RecoilState<T>,
newValueOrUpdater: ValueOrUpdater<T>,
) => {
this.checkRefCount_INTERNAL();
const store = this.getStore_INTERNAL();
// This batchUpdates ensures this `set` is applied immediately and you can
// read the written value after calling `set`. I would like to remove this
// behavior and only batch in `Snapshot.map`, but this would be a breaking
// change potentially.
this._batch(() => {
updateRetainCount(store, recoilState.key, 1);
setRecoilValue(this.getStore_INTERNAL(), recoilState, newValueOrUpdater);
});
};
// We want to allow the methods to be destructured and used as accessors
// eslint-disable-next-line fb-www/extra-arrow-initializer
reset: ResetRecoilState = <T>(recoilState: RecoilState<T>) => {
this.checkRefCount_INTERNAL();
const store = this.getStore_INTERNAL();
// See note at `set` about batched updates.
this._batch(() => {
updateRetainCount(store, recoilState.key, 1);
setRecoilValue(this.getStore_INTERNAL(), recoilState, DEFAULT_VALUE);
});
};
// We want to allow the methods to be destructured and used as accessors
// eslint-disable-next-line fb-www/extra-arrow-initializer
setUnvalidatedAtomValues_DEPRECATED: (Map<NodeKey, mixed>) => void = (
values: Map<NodeKey, mixed>,
) => {
this.checkRefCount_INTERNAL();
const store = this.getStore_INTERNAL();
// See note at `set` about batched updates.
batchUpdates(() => {
for (const [k, v] of values.entries()) {
updateRetainCount(store, k, 1);
setUnvalidatedRecoilValue(store, new AbstractRecoilValue(k), v);
}
});
};
}
module.exports = {
Snapshot,
MutableSnapshot,
freshSnapshot,
cloneSnapshot,
}; |
const concatIterables = require('../util/Recoil_concatIterables'); |
run.py | from dash import dcc, html
from dash.dependencies import Input, Output
from app import app
from layouts import index, record, watch, replay, about
# from examples.run import callback_example
from callbacks.record import *
from callbacks.watch import *
from callbacks.replay import *
layout = html.Article([
dcc.Location(id='url', refresh=False), # tracks the browser address bar location
html.Section(id='page-content'), # container for the current page layout
])
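# Router callback: render the page layout that matches the current pathname.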
@app.callback(Output('page-content', 'children'),
Input('url', 'pathname'))
def display_page(pathname):
if pathname == '/':
return index.layout
if pathname == '/record':
return record.layout
if pathname == '/watch':
return watch.layout
if pathname == '/replay':
return replay.layout
if pathname == '/about':
return about.layout
# elif pathname.startswith('/examples/'):
# return callback_example(pathname)
# else:
# return '404' |
if __name__ == '__main__':
import asyncio
from dash_xinet.server import run_server
port = 7777
# app.run_server(debug=True, port=5555, threaded=True)
# app.run_server(app, debug=True, port=5555, threaded=True)
run = run_server(app, layout,
port=port, debug=True
)
asyncio.run(run)
else:
app.layout = layout
server = app.server # exposes the WSGI server for Dash deployment |
app.config.suppress_callback_exceptions = True # required to support the multi-page app
createChannel.js | import { CREATE_CHANNEL } from '../actions/actionsTypes';
const initialState = {
isFetching: false,
failure: false,
result: '',
error: ''
};
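// Reducer for the channel-creation flow: tracks the request, success and failure states.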
export default function messages(state = initialState, action) {
switch (action.type) {
case CREATE_CHANNEL.REQUEST:
return {
...state,
isFetching: true,
failure: false,
error: ''
};
case CREATE_CHANNEL.SUCCESS:
return {
...state,
isFetching: false,
failure: false,
result: action.data
};
case CREATE_CHANNEL.FAILURE:
return {
...state, | };
default:
return state;
}
} | isFetching: false,
failure: true,
error: action.err |
path.rs | //! Utility methods for URL paths
/// The following rules are applied iteratively until no further processing can
/// be done:
/// 1. Replace multiple slashes with a single slash.
/// 2. Eliminate each . path name element (the current directory).
/// 3. Eliminate each inner .. path name element (the parent directory)
/// along with the non-.. element that precedes it.
/// 4. Eliminate .. elements that begin a rooted path:
/// that is, replace "/.." by "/" at the beginning of a path.
///
/// If the result of this process is an empty string, "/" is returned
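///
/// A few cases taken from the test table below:
/// `clean("")` -> `"/"`, `clean("//abc//")` -> `"/abc/"`,
/// `clean("/abc/def/../..")` -> `"/"`, `clean("abc/./../def")` -> `"/def"`.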
pub fn clean(p: &str) -> String {
// Turn empty string into "/"
if p == "" {
return "/".to_string();
}
let mut buf: Vec<u8> = Vec::new();
let n = p.len();
// next byte to process.
let mut r = 1;
// next byte to write.
let mut w = 1;
// path must start with '/'
if !p.starts_with('/') {
r = 0;
buf.resize(n + 1, 0);
buf[0] = b'/';
}
let mut trailing = n > 1 && p.ends_with('/');
let p = p.as_bytes();
while r < n {
match p[r] {
// empty path element, trailing slash is added after the end
b'/' => r += 1,
b'.' => {
if r + 1 == n {
trailing = true;
r += 1;
} else if p[r + 1] == b'/' {
// . element
r += 2;
} else if p[r + 1] == b'.' && (r + 2 == n || p[r + 2] == b'/') {
// .. element: remove to last /
r += 3;
if w > 1 {
// can backtrack
w -= 1;
if buf.is_empty() {
while w > 1 && p[w] != b'/' {
w -= 1;
}
} else {
while w > 1 && buf[w] != b'/' {
w -= 1;
}
}
}
}
}
_ => {
// real path element.
// add slash if needed
if w > 1 {
buf_app(&mut buf, p, w, b'/');
w += 1;
}
// copy element
while r < n && p[r] != b'/' {
buf_app(&mut buf, p, w, p[r]);
w += 1;
r += 1;
}
}
}
}
// re-append trailing slash
if trailing && w > 1 {
buf_app(&mut buf, p, w, b'/');
w += 1;
}
if buf.is_empty() {
return String::from_utf8(p[..w].to_vec()).unwrap();
}
String::from_utf8(buf[..w].to_vec()).unwrap()
}
#[inline]
fn buf_app(buf: &mut Vec<u8>, s: &[u8], w: usize, c: u8) {
if buf.is_empty() {
// If the next character is the same as in the original string, we do
// not have to allocate.
if s[w] == c {
return;
}
buf.resize(s.len(), 0);
buf[..w].copy_from_slice(&s[..w]);
}
buf[w] = c;
}
#[cfg(test)]
mod tests {
use super::*;
// path, result
fn clean_tests() -> Vec<(&'static str, &'static str)> {
vec![
// Already clean
("/", "/"),
("/abc", "/abc"),
("/a/b/c", "/a/b/c"),
("/abc/", "/abc/"),
("/a/b/c/", "/a/b/c/"),
// missing root
("", "/"),
("a/", "/a/"),
("abc", "/abc"),
("abc/def", "/abc/def"),
("a/b/c", "/a/b/c"),
// Remove doubled slash
("//", "/"),
("/abc//", "/abc/"),
("/abc/def//", "/abc/def/"),
("/a/b/c//", "/a/b/c/"),
("/abc//def//ghi", "/abc/def/ghi"),
("//abc", "/abc"),
("///abc", "/abc"),
("//abc//", "/abc/"),
// Remove . elements
(".", "/"),
("./", "/"),
("/abc/./def", "/abc/def"),
("/./abc/def", "/abc/def"),
("/abc/.", "/abc/"),
// Remove .. elements
("..", "/"),
("../", "/"),
("../../", "/"),
("../..", "/"),
("../../abc", "/abc"),
("/abc/def/ghi/../jkl", "/abc/def/jkl"),
("/abc/def/../ghi/../jkl", "/abc/jkl"),
("/abc/def/..", "/abc"),
("/abc/def/../..", "/"),
("/abc/def/../../..", "/"),
("/abc/def/../../..", "/"),
("/abc/def/../../../ghi/jkl/../../../mno", "/mno"),
// Combinations
("abc/./../def", "/def"),
("abc//./../def", "/def"),
("abc/../../././../def", "/def"),
]
}
#[test]
fn test_path_clean() {
let tests = clean_tests();
for test in tests {
let s = clean(test.0);
assert_eq!(test.1, s);
let s = clean(test.1);
assert_eq!(test.1, s);
}
}
#[test]
fn | () {
let mut test_paths: Vec<(String, String)> = Vec::new();
for i in 1..1234 {
let ss = "a".repeat(i);
let correct_path = format!("{}{}", "/", ss);
test_paths.push((correct_path.clone(), correct_path.clone()));
test_paths.push((ss.clone(), correct_path.clone()));
test_paths.push((format!("{}{}", "//", ss), correct_path.clone()));
test_paths.push((format!("{}{}{}", "//", ss, "/b/.."), correct_path.clone()));
}
for test in test_paths {
let s = clean(&test.0);
assert_eq!(test.1, s);
let s = clean(&test.1);
assert_eq!(test.1, s);
}
}
}
| test_path_clean_long |
export.ts | import { manualReviewSchema } from './manual-review';
import { invalidValueSchema } from './invalid-value/invalid-value';
import { autoDeclineIVSchema } from './invalid-value/auto-decline';
import { exceptionValueIVSchema } from './invalid-value/exception-value';
import { invalidItemsSchema } from './invalid-items/invalid-items';
import { overUnderstockedSchema } from './overstocked-understocked/overstocked-understocked'; | export {
manualReviewSchema,
invalidValueSchema,
autoDeclineIVSchema,
exceptionValueIVSchema,
invalidItemsSchema,
overUnderstockedSchema,
dupedSchema,
dupedCheckFailedSchema
}; | import { dupedSchema } from './duped/duped';
import { dupedCheckFailedSchema } from './duped-check-failed/duped-check-failed';
|
test_get_node_detail.py | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from django.test import TestCase
from gcloud import err_code
from gcloud.taskflow3.models import TaskFlowInstance
from gcloud.tests.mock import * # noqa
from gcloud.tests.mock_settings import * # noqa
class GetNodeDetailTestCase(TestCase):
def test_node_does_not_exist(self):
taskflow = TaskFlowInstance()
taskflow.id = 1
taskflow.has_node = MagicMock(return_value=False)
detail = taskflow.get_node_detail(node_id="node_id", username="username")
self.assertFalse(detail["result"])
self.assertEqual(detail["code"], err_code.REQUEST_PARAM_INVALID.code)
def test_get_node_data_err(self):
taskflow = TaskFlowInstance()
taskflow.id = 1
taskflow.engine_ver = 2
taskflow.has_node = MagicMock(return_value=True)
dispatcher = MagicMock()
get_node_data_return = {"result": False}
dispatcher.get_node_data = MagicMock(return_value=get_node_data_return)
with patch(TASKFLOW_MODEL_NODE_CMD_DISPATCHER, MagicMock(return_value=dispatcher)):
detail = taskflow.get_node_detail(node_id="node_id", username="username", project_id="project_id")
self.assertEqual(detail, get_node_data_return)
def test_get_node_detail_err(self):
taskflow = TaskFlowInstance()
taskflow.id = 1
taskflow.engine_ver = 2
taskflow.has_node = MagicMock(return_value=True)
dispatcher = MagicMock()
get_node_data_return = {"result": True, "data": {}}
get_node_detail_return = {"result": False}
dispatcher.get_node_data = MagicMock(return_value=get_node_data_return)
dispatcher.get_node_detail = MagicMock(return_value=get_node_detail_return)
| self.assertEqual(detail, get_node_detail_return)
def test_include_data_is_false(self):
taskflow = TaskFlowInstance()
taskflow.id = 1
taskflow.engine_ver = 2
taskflow.has_node = MagicMock(return_value=True)
dispatcher = MagicMock()
get_node_data_return = {"result": True, "data": {}}
get_node_detail_return = {"result": True, "data": {}}
dispatcher.get_node_data = MagicMock(return_value=get_node_data_return)
dispatcher.get_node_detail = MagicMock(return_value=get_node_detail_return)
dispatcher_init = MagicMock(return_value=dispatcher)
node_id = "node_id"
username = "username"
component_code = "component_code"
subprocess_stack = ["1"]
loop = 1
include_data = False
with patch(TASKFLOW_MODEL_NODE_CMD_DISPATCHER, dispatcher_init):
detail = taskflow.get_node_detail(
node_id=node_id,
username=username,
component_code=component_code,
subprocess_stack=subprocess_stack,
loop=loop,
include_data=include_data,
)
dispatcher_init.assert_called_once_with(engine_ver=taskflow.engine_ver, node_id=node_id, taskflow_id=1)
dispatcher.get_node_data.assert_not_called()
dispatcher.get_node_detail.assert_called_once_with(
username=username,
component_code=component_code,
subprocess_stack=subprocess_stack,
pipeline_instance=taskflow.pipeline_instance,
loop=loop,
)
self.assertEqual(detail, {"code": 0, "data": {}, "message": "", "result": True})
def test_success(self):
taskflow = TaskFlowInstance()
taskflow.id = 1
taskflow.engine_ver = 2
taskflow.has_node = MagicMock(return_value=True)
dispatcher = MagicMock()
get_node_data_return = {"result": True, "data": {"data": "data"}}
get_node_detail_return = {"result": True, "data": {"detail": "detail"}}
dispatcher.get_node_data = MagicMock(return_value=get_node_data_return)
dispatcher.get_node_detail = MagicMock(return_value=get_node_detail_return)
dispatcher_init = MagicMock(return_value=dispatcher)
node_id = "node_id"
username = "username"
component_code = "component_code"
subprocess_stack = ["1"]
loop = 1
include_data = True
with patch(TASKFLOW_MODEL_NODE_CMD_DISPATCHER, dispatcher_init):
detail = taskflow.get_node_detail(
node_id=node_id,
username=username,
component_code=component_code,
subprocess_stack=subprocess_stack,
loop=loop,
include_data=include_data,
project_id="project_id",
)
dispatcher_init.assert_called_once_with(engine_ver=taskflow.engine_ver, node_id=node_id, taskflow_id=1)
dispatcher.get_node_data.assert_called_once_with(
username=username,
component_code=component_code,
subprocess_stack=subprocess_stack,
pipeline_instance=taskflow.pipeline_instance,
loop=loop,
project_id="project_id",
)
dispatcher.get_node_detail.assert_called_once_with(
username=username,
component_code=component_code,
subprocess_stack=subprocess_stack,
pipeline_instance=taskflow.pipeline_instance,
loop=loop,
)
self.assertEqual(
detail, {"code": 0, "data": {"data": "data", "detail": "detail"}, "message": "", "result": True}
) | with patch(TASKFLOW_MODEL_NODE_CMD_DISPATCHER, MagicMock(return_value=dispatcher)):
detail = taskflow.get_node_detail(node_id="node_id", username="username", project_id="project_id")
|
models.go | package notificationhubs
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"encoding/json"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/autorest/date"
"github.com/Azure/go-autorest/autorest/to"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// The package's fully qualified name.
const fqdn = "github.com/Azure/azure-sdk-for-go/services/notificationhubs/mgmt/2016-03-01/notificationhubs"
// AdmCredential description of a NotificationHub AdmCredential.
type AdmCredential struct {
// AdmCredentialProperties - Properties of NotificationHub AdmCredential.
*AdmCredentialProperties `json:"properties,omitempty"`
}
// MarshalJSON is the custom marshaler for AdmCredential.
func (ac AdmCredential) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
if ac.AdmCredentialProperties != nil {
objectMap["properties"] = ac.AdmCredentialProperties
}
return json.Marshal(objectMap)
}
// UnmarshalJSON is the custom unmarshaler for AdmCredential struct.
func (ac *AdmCredential) UnmarshalJSON(body []byte) error {
var m map[string]*json.RawMessage
err := json.Unmarshal(body, &m)
if err != nil {
return err
}
for k, v := range m {
switch k {
case "properties":
if v != nil {
var admCredentialProperties AdmCredentialProperties
err = json.Unmarshal(*v, &admCredentialProperties)
if err != nil {
return err
}
ac.AdmCredentialProperties = &admCredentialProperties
}
}
}
return nil
}
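// Illustrative sketch (not part of the generated code): the MarshalJSON and
// UnmarshalJSON pair above flattens the embedded AdmCredentialProperties into a
// "properties" envelope on the wire. A round trip therefore looks roughly like
// the function below; the helper name and credential values are placeholders,
// and only the encoding/json import already present in this file is assumed.
func exampleAdmCredentialRoundTrip() (*AdmCredential, error) {
    clientID := "client-id-placeholder"
    clientSecret := "client-secret-placeholder"
    in := AdmCredential{
        AdmCredentialProperties: &AdmCredentialProperties{
            ClientID:     &clientID,
            ClientSecret: &clientSecret,
        },
    }
    // Produces {"properties":{"clientId":"...","clientSecret":"..."}}.
    raw, err := json.Marshal(in)
    if err != nil {
        return nil, err
    }
    // Restores the embedded properties pointer from the "properties" envelope.
    var out AdmCredential
    if err := json.Unmarshal(raw, &out); err != nil {
        return nil, err
    }
    return &out, nil
}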
// AdmCredentialProperties description of a NotificationHub AdmCredential.
type AdmCredentialProperties struct {
// ClientID - The client identifier.
ClientID *string `json:"clientId,omitempty"`
// ClientSecret - The credential secret access key.
ClientSecret *string `json:"clientSecret,omitempty"`
// AuthTokenURL - The URL of the authorization token.
AuthTokenURL *string `json:"authTokenUrl,omitempty"`
}
// ApnsCredential description of a NotificationHub ApnsCredential.
type ApnsCredential struct {
// ApnsCredentialProperties - Properties of NotificationHub ApnsCredential.
*ApnsCredentialProperties `json:"properties,omitempty"`
}
// MarshalJSON is the custom marshaler for ApnsCredential.
func (ac ApnsCredential) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
if ac.ApnsCredentialProperties != nil {
objectMap["properties"] = ac.ApnsCredentialProperties
}
return json.Marshal(objectMap)
}
// UnmarshalJSON is the custom unmarshaler for ApnsCredential struct.
func (ac *ApnsCredential) UnmarshalJSON(body []byte) error {
var m map[string]*json.RawMessage
err := json.Unmarshal(body, &m)
if err != nil {
return err
}
for k, v := range m {
switch k {
case "properties":
if v != nil {
var apnsCredentialProperties ApnsCredentialProperties
err = json.Unmarshal(*v, &apnsCredentialProperties)
if err != nil {
return err
}
ac.ApnsCredentialProperties = &apnsCredentialProperties
}
}
}
return nil
}
// ApnsCredentialProperties description of a NotificationHub ApnsCredential.
type ApnsCredentialProperties struct {
// ApnsCertificate - The APNS certificate.
ApnsCertificate *string `json:"apnsCertificate,omitempty"`
// CertificateKey - The certificate key.
CertificateKey *string `json:"certificateKey,omitempty"`
// Endpoint - The endpoint of this credential.
Endpoint *string `json:"endpoint,omitempty"`
// Thumbprint - The APNS certificate Thumbprint
Thumbprint *string `json:"thumbprint,omitempty"`
}
// BaiduCredential description of a NotificationHub BaiduCredential.
type BaiduCredential struct {
// BaiduCredentialProperties - Properties of NotificationHub BaiduCredential.
*BaiduCredentialProperties `json:"properties,omitempty"`
}
// MarshalJSON is the custom marshaler for BaiduCredential.
func (bc BaiduCredential) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
if bc.BaiduCredentialProperties != nil {
objectMap["properties"] = bc.BaiduCredentialProperties
}
return json.Marshal(objectMap)
}
// UnmarshalJSON is the custom unmarshaler for BaiduCredential struct.
func (bc *BaiduCredential) UnmarshalJSON(body []byte) error {
var m map[string]*json.RawMessage
err := json.Unmarshal(body, &m)
if err != nil {
return err
}
for k, v := range m {
switch k {
case "properties":
if v != nil {
var baiduCredentialProperties BaiduCredentialProperties
err = json.Unmarshal(*v, &baiduCredentialProperties)
if err != nil {
return err
}
bc.BaiduCredentialProperties = &baiduCredentialProperties
}
}
}
return nil
}
// BaiduCredentialProperties description of a NotificationHub BaiduCredential.
type BaiduCredentialProperties struct {
// BaiduAPIKey - Baidu Api Key.
BaiduAPIKey *string `json:"baiduApiKey,omitempty"`
// BaiduEndPoint - Baidu Endpoint.
BaiduEndPoint *string `json:"baiduEndPoint,omitempty"`
// BaiduSecretKey - Baidu Secret Key
BaiduSecretKey *string `json:"baiduSecretKey,omitempty"`
}
// CheckAvailabilityParameters parameters supplied to the Check Name Availability for Namespace and
// NotificationHubs.
type CheckAvailabilityParameters struct {
// ID - READ-ONLY; Resource Id
ID *string `json:"id,omitempty"`
// Name - Resource name
Name *string `json:"name,omitempty"`
// Type - READ-ONLY; Resource type
Type *string `json:"type,omitempty"`
// Location - Resource location
Location *string `json:"location,omitempty"`
// Tags - Resource tags
Tags map[string]*string `json:"tags"`
// Sku - The sku of the created namespace
Sku *Sku `json:"sku,omitempty"`
// IsAvailiable - True if the name is available and can be used to create new Namespace/NotificationHub. Otherwise false.
IsAvailiable *bool `json:"isAvailiable,omitempty"`
}
// MarshalJSON is the custom marshaler for CheckAvailabilityParameters.
func (capVar CheckAvailabilityParameters) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
if capVar.Name != nil {
objectMap["name"] = capVar.Name
}
if capVar.Location != nil {
objectMap["location"] = capVar.Location
}
if capVar.Tags != nil {
objectMap["tags"] = capVar.Tags
}
if capVar.Sku != nil {
objectMap["sku"] = capVar.Sku
}
if capVar.IsAvailiable != nil {
objectMap["isAvailiable"] = capVar.IsAvailiable
}
return json.Marshal(objectMap)
}
// CheckAvailabilityResult description of a CheckAvailability resource.
type CheckAvailabilityResult struct {
autorest.Response `json:"-"`
// IsAvailiable - True if the name is available and can be used to create new Namespace/NotificationHub. Otherwise false.
IsAvailiable *bool `json:"isAvailiable,omitempty"`
// ID - READ-ONLY; Resource Id
ID *string `json:"id,omitempty"`
// Name - READ-ONLY; Resource name
Name *string `json:"name,omitempty"`
// Type - READ-ONLY; Resource type
Type *string `json:"type,omitempty"`
// Location - Resource location
Location *string `json:"location,omitempty"`
// Tags - Resource tags
Tags map[string]*string `json:"tags"`
// Sku - The sku of the created namespace
Sku *Sku `json:"sku,omitempty"`
}
// MarshalJSON is the custom marshaler for CheckAvailabilityResult.
func (car CheckAvailabilityResult) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
if car.IsAvailiable != nil {
objectMap["isAvailiable"] = car.IsAvailiable
}
if car.Location != nil {
objectMap["location"] = car.Location
}
if car.Tags != nil {
objectMap["tags"] = car.Tags
}
if car.Sku != nil {
objectMap["sku"] = car.Sku
}
return json.Marshal(objectMap)
}
// CreateOrUpdateParameters parameters supplied to the CreateOrUpdate NotificationHub operation.
type CreateOrUpdateParameters struct {
// Properties - Properties of the NotificationHub.
*Properties `json:"properties,omitempty"`
// ID - READ-ONLY; Resource Id
ID *string `json:"id,omitempty"`
// Name - READ-ONLY; Resource name
Name *string `json:"name,omitempty"`
// Type - READ-ONLY; Resource type
Type *string `json:"type,omitempty"`
// Location - Resource location
Location *string `json:"location,omitempty"`
// Tags - Resource tags
Tags map[string]*string `json:"tags"`
// Sku - The sku of the created namespace
Sku *Sku `json:"sku,omitempty"`
}
// MarshalJSON is the custom marshaler for CreateOrUpdateParameters.
func (coup CreateOrUpdateParameters) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
if coup.Properties != nil {
objectMap["properties"] = coup.Properties
}
if coup.Location != nil {
objectMap["location"] = coup.Location
}
if coup.Tags != nil {
objectMap["tags"] = coup.Tags
}
if coup.Sku != nil {
objectMap["sku"] = coup.Sku
}
return json.Marshal(objectMap)
}
// UnmarshalJSON is the custom unmarshaler for CreateOrUpdateParameters struct.
func (coup *CreateOrUpdateParameters) UnmarshalJSON(body []byte) error {
var m map[string]*json.RawMessage
err := json.Unmarshal(body, &m)
if err != nil {
return err
}
for k, v := range m {
switch k {
case "properties":
if v != nil {
var properties Properties
err = json.Unmarshal(*v, &properties)
if err != nil {
return err
}
coup.Properties = &properties
}
case "id":
if v != nil {
var ID string
err = json.Unmarshal(*v, &ID)
if err != nil {
return err
}
coup.ID = &ID
}
case "name":
if v != nil {
var name string
err = json.Unmarshal(*v, &name)
if err != nil {
return err
}
coup.Name = &name
}
case "type":
if v != nil {
var typeVar string
err = json.Unmarshal(*v, &typeVar)
if err != nil {
return err
}
coup.Type = &typeVar
}
case "location":
if v != nil {
var location string
err = json.Unmarshal(*v, &location)
if err != nil {
return err
}
coup.Location = &location
}
case "tags":
if v != nil {
var tags map[string]*string
err = json.Unmarshal(*v, &tags)
if err != nil {
return err
}
coup.Tags = tags
}
case "sku":
if v != nil {
var sku Sku
err = json.Unmarshal(*v, &sku)
if err != nil {
return err
}
coup.Sku = &sku
}
}
}
return nil
}
// GcmCredential description of a NotificationHub GcmCredential.
type GcmCredential struct {
// GcmCredentialProperties - Properties of NotificationHub GcmCredential.
*GcmCredentialProperties `json:"properties,omitempty"`
}
// MarshalJSON is the custom marshaler for GcmCredential.
func (gc GcmCredential) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
if gc.GcmCredentialProperties != nil {
objectMap["properties"] = gc.GcmCredentialProperties
}
return json.Marshal(objectMap)
}
// UnmarshalJSON is the custom unmarshaler for GcmCredential struct.
func (gc *GcmCredential) UnmarshalJSON(body []byte) error {
var m map[string]*json.RawMessage
err := json.Unmarshal(body, &m)
if err != nil {
return err
}
for k, v := range m {
switch k {
case "properties":
if v != nil {
var gcmCredentialProperties GcmCredentialProperties
err = json.Unmarshal(*v, &gcmCredentialProperties)
if err != nil {
return err
}
gc.GcmCredentialProperties = &gcmCredentialProperties
}
}
}
return nil
}
// GcmCredentialProperties description of a NotificationHub GcmCredential.
type GcmCredentialProperties struct {
// GcmEndpoint - The GCM endpoint.
GcmEndpoint *string `json:"gcmEndpoint,omitempty"`
// GoogleAPIKey - The Google API key.
GoogleAPIKey *string `json:"googleApiKey,omitempty"`
}
// ListResult the response of the List NotificationHub operation.
type ListResult struct {
autorest.Response `json:"-"`
// Value - Result of the List NotificationHub operation.
Value *[]ResourceType `json:"value,omitempty"`
// NextLink - Link to the next set of results. Not empty if Value contains incomplete list of NotificationHub
NextLink *string `json:"nextLink,omitempty"`
}
// ListResultIterator provides access to a complete listing of ResourceType values.
type ListResultIterator struct {
i int
page ListResultPage
}
// NextWithContext advances to the next value. If there was an error making
// the request the iterator does not advance and the error is returned.
func (iter *ListResultIterator) NextWithContext(ctx context.Context) (err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ListResultIterator.NextWithContext")
defer func() {
sc := -1
if iter.Response().Response.Response != nil {
sc = iter.Response().Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
iter.i++
if iter.i < len(iter.page.Values()) {
return nil
}
err = iter.page.NextWithContext(ctx)
if err != nil {
iter.i--
return err
}
iter.i = 0
return nil
}
// Next advances to the next value. If there was an error making
// the request the iterator does not advance and the error is returned.
// Deprecated: Use NextWithContext() instead.
func (iter *ListResultIterator) Next() error {
return iter.NextWithContext(context.Background())
}
// NotDone returns true if the enumeration should be started or is not yet complete.
func (iter ListResultIterator) NotDone() bool {
return iter.page.NotDone() && iter.i < len(iter.page.Values())
}
// Response returns the raw server response from the last page request.
func (iter ListResultIterator) Response() ListResult {
return iter.page.Response()
}
// Value returns the current value or a zero-initialized value if the
// iterator has advanced beyond the end of the collection.
func (iter ListResultIterator) Value() ResourceType {
if !iter.page.NotDone() {
return ResourceType{}
}
return iter.page.Values()[iter.i]
}
// Creates a new instance of the ListResultIterator type.
func NewListResultIterator(page ListResultPage) ListResultIterator {
return ListResultIterator{page: page}
}
// IsEmpty returns true if the ListResult contains no values.
func (lr ListResult) IsEmpty() bool {
return lr.Value == nil || len(*lr.Value) == 0
}
// hasNextLink returns true if the NextLink is not empty.
func (lr ListResult) hasNextLink() bool {
return lr.NextLink != nil && len(*lr.NextLink) != 0
}
// listResultPreparer prepares a request to retrieve the next set of results.
// It returns nil if no more results exist.
func (lr ListResult) listResultPreparer(ctx context.Context) (*http.Request, error) {
if !lr.hasNextLink() {
return nil, nil
}
return autorest.Prepare((&http.Request{}).WithContext(ctx),
autorest.AsJSON(),
autorest.AsGet(),
autorest.WithBaseURL(to.String(lr.NextLink)))
}
// ListResultPage contains a page of ResourceType values.
type ListResultPage struct {
fn func(context.Context, ListResult) (ListResult, error)
lr ListResult
}
// NextWithContext advances to the next page of values. If there was an error making
// the request the page does not advance and the error is returned.
func (page *ListResultPage) NextWithContext(ctx context.Context) (err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ListResultPage.NextWithContext")
defer func() {
sc := -1
if page.Response().Response.Response != nil {
sc = page.Response().Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
for {
next, err := page.fn(ctx, page.lr)
if err != nil {
return err
}
page.lr = next
if !next.hasNextLink() || !next.IsEmpty() {
break
}
}
return nil
}
// Next advances to the next page of values. If there was an error making
// the request the page does not advance and the error is returned.
// Deprecated: Use NextWithContext() instead.
func (page *ListResultPage) Next() error {
return page.NextWithContext(context.Background())
}
// NotDone returns true if the page enumeration should be started or is not yet complete.
func (page ListResultPage) NotDone() bool {
return !page.lr.IsEmpty()
}
// Response returns the raw server response from the last page request.
func (page ListResultPage) Response() ListResult {
return page.lr
}
// Values returns the slice of values for the current page or nil if there are no values.
func (page ListResultPage) Values() []ResourceType {
if page.lr.IsEmpty() {
return nil
}
return *page.lr.Value
}
// Creates a new instance of the ListResultPage type.
func NewListResultPage(getNextPage func(context.Context, ListResult) (ListResult, error)) ListResultPage {
return ListResultPage{fn: getNextPage}
}
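// Illustrative sketch (not part of the generated code): given a populated
// ListResultPage (normally returned by the notification hub list operation),
// the iterator defined above walks every ResourceType across pages. The helper
// name is hypothetical; it relies only on types from this file and the context
// import already present.
func exampleCollectHubNames(ctx context.Context, page ListResultPage) ([]string, error) {
    var names []string
    for iter := NewListResultIterator(page); iter.NotDone(); {
        hub := iter.Value()
        if hub.Name != nil {
            names = append(names, *hub.Name)
        }
        // Advancing may trigger a request for the next page via NextLink.
        if err := iter.NextWithContext(ctx); err != nil {
            return nil, err
        }
    }
    return names, nil
}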
// MpnsCredential description of a NotificationHub MpnsCredential.
type MpnsCredential struct {
// MpnsCredentialProperties - Properties of NotificationHub MpnsCredential.
*MpnsCredentialProperties `json:"properties,omitempty"`
}
// MarshalJSON is the custom marshaler for MpnsCredential.
func (mc MpnsCredential) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
if mc.MpnsCredentialProperties != nil {
objectMap["properties"] = mc.MpnsCredentialProperties
}
return json.Marshal(objectMap)
}
// UnmarshalJSON is the custom unmarshaler for MpnsCredential struct.
func (mc *MpnsCredential) UnmarshalJSON(body []byte) error {
var m map[string]*json.RawMessage
err := json.Unmarshal(body, &m)
if err != nil {
return err
}
for k, v := range m {
switch k {
case "properties":
if v != nil {
var mpnsCredentialProperties MpnsCredentialProperties
err = json.Unmarshal(*v, &mpnsCredentialProperties)
if err != nil {
return err
}
mc.MpnsCredentialProperties = &mpnsCredentialProperties
}
}
}
return nil
}
// MpnsCredentialProperties description of a NotificationHub MpnsCredential.
type MpnsCredentialProperties struct {
// MpnsCertificate - The MPNS certificate.
MpnsCertificate *string `json:"mpnsCertificate,omitempty"`
// CertificateKey - The certificate key for this credential.
CertificateKey *string `json:"certificateKey,omitempty"`
// Thumbprint - The MPNS certificate Thumbprint
Thumbprint *string `json:"thumbprint,omitempty"`
}
// NamespaceCreateOrUpdateParameters parameters supplied to the CreateOrUpdate Namespace operation.
type NamespaceCreateOrUpdateParameters struct {
// NamespaceProperties - Properties of the Namespace.
*NamespaceProperties `json:"properties,omitempty"`
// ID - READ-ONLY; Resource Id
ID *string `json:"id,omitempty"`
// Name - READ-ONLY; Resource name
Name *string `json:"name,omitempty"`
// Type - READ-ONLY; Resource type
Type *string `json:"type,omitempty"`
// Location - Resource location
Location *string `json:"location,omitempty"`
// Tags - Resource tags
Tags map[string]*string `json:"tags"`
// Sku - The sku of the created namespace
Sku *Sku `json:"sku,omitempty"`
}
// MarshalJSON is the custom marshaler for NamespaceCreateOrUpdateParameters.
func (ncoup NamespaceCreateOrUpdateParameters) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
if ncoup.NamespaceProperties != nil {
objectMap["properties"] = ncoup.NamespaceProperties
}
if ncoup.Location != nil {
objectMap["location"] = ncoup.Location
}
if ncoup.Tags != nil {
objectMap["tags"] = ncoup.Tags
}
if ncoup.Sku != nil {
objectMap["sku"] = ncoup.Sku
}
return json.Marshal(objectMap)
}
// UnmarshalJSON is the custom unmarshaler for NamespaceCreateOrUpdateParameters struct.
func (ncoup *NamespaceCreateOrUpdateParameters) UnmarshalJSON(body []byte) error {
var m map[string]*json.RawMessage
err := json.Unmarshal(body, &m)
if err != nil {
return err
}
for k, v := range m {
switch k {
case "properties":
if v != nil {
var namespaceProperties NamespaceProperties
err = json.Unmarshal(*v, &namespaceProperties)
if err != nil {
return err
}
ncoup.NamespaceProperties = &namespaceProperties
}
case "id":
if v != nil {
var ID string
err = json.Unmarshal(*v, &ID)
if err != nil {
return err
}
ncoup.ID = &ID
}
case "name":
if v != nil {
var name string
err = json.Unmarshal(*v, &name)
if err != nil {
return err
}
ncoup.Name = &name
}
case "type":
if v != nil {
var typeVar string
err = json.Unmarshal(*v, &typeVar)
if err != nil {
return err
}
ncoup.Type = &typeVar
}
case "location":
if v != nil {
var location string
err = json.Unmarshal(*v, &location)
if err != nil {
return err
}
ncoup.Location = &location
}
case "tags":
if v != nil {
var tags map[string]*string
err = json.Unmarshal(*v, &tags)
if err != nil {
return err
}
ncoup.Tags = tags
}
case "sku":
if v != nil {
var sku Sku
err = json.Unmarshal(*v, &sku)
if err != nil {
return err
}
ncoup.Sku = &sku
}
}
}
return nil
}
| type NamespaceListResult struct {
autorest.Response `json:"-"`
// Value - Result of the List Namespace operation.
Value *[]NamespaceResource `json:"value,omitempty"`
// NextLink - Link to the next set of results. Not empty if Value contains incomplete list of Namespaces
NextLink *string `json:"nextLink,omitempty"`
}
// NamespaceListResultIterator provides access to a complete listing of NamespaceResource values.
type NamespaceListResultIterator struct {
i int
page NamespaceListResultPage
}
// NextWithContext advances to the next value. If there was an error making
// the request the iterator does not advance and the error is returned.
func (iter *NamespaceListResultIterator) NextWithContext(ctx context.Context) (err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/NamespaceListResultIterator.NextWithContext")
defer func() {
sc := -1
if iter.Response().Response.Response != nil {
sc = iter.Response().Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
iter.i++
if iter.i < len(iter.page.Values()) {
return nil
}
err = iter.page.NextWithContext(ctx)
if err != nil {
iter.i--
return err
}
iter.i = 0
return nil
}
// Next advances to the next value. If there was an error making
// the request the iterator does not advance and the error is returned.
// Deprecated: Use NextWithContext() instead.
func (iter *NamespaceListResultIterator) Next() error {
return iter.NextWithContext(context.Background())
}
// NotDone returns true if the enumeration should be started or is not yet complete.
func (iter NamespaceListResultIterator) NotDone() bool {
return iter.page.NotDone() && iter.i < len(iter.page.Values())
}
// Response returns the raw server response from the last page request.
func (iter NamespaceListResultIterator) Response() NamespaceListResult {
return iter.page.Response()
}
// Value returns the current value or a zero-initialized value if the
// iterator has advanced beyond the end of the collection.
func (iter NamespaceListResultIterator) Value() NamespaceResource {
if !iter.page.NotDone() {
return NamespaceResource{}
}
return iter.page.Values()[iter.i]
}
// Creates a new instance of the NamespaceListResultIterator type.
func NewNamespaceListResultIterator(page NamespaceListResultPage) NamespaceListResultIterator {
return NamespaceListResultIterator{page: page}
}
// IsEmpty returns true if the NamespaceListResult contains no values.
func (nlr NamespaceListResult) IsEmpty() bool {
return nlr.Value == nil || len(*nlr.Value) == 0
}
// hasNextLink returns true if the NextLink is not empty.
func (nlr NamespaceListResult) hasNextLink() bool {
return nlr.NextLink != nil && len(*nlr.NextLink) != 0
}
// namespaceListResultPreparer prepares a request to retrieve the next set of results.
// It returns nil if no more results exist.
func (nlr NamespaceListResult) namespaceListResultPreparer(ctx context.Context) (*http.Request, error) {
if !nlr.hasNextLink() {
return nil, nil
}
return autorest.Prepare((&http.Request{}).WithContext(ctx),
autorest.AsJSON(),
autorest.AsGet(),
autorest.WithBaseURL(to.String(nlr.NextLink)))
}
// NamespaceListResultPage contains a page of NamespaceResource values.
type NamespaceListResultPage struct {
fn func(context.Context, NamespaceListResult) (NamespaceListResult, error)
nlr NamespaceListResult
}
// NextWithContext advances to the next page of values. If there was an error making
// the request the page does not advance and the error is returned.
func (page *NamespaceListResultPage) NextWithContext(ctx context.Context) (err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/NamespaceListResultPage.NextWithContext")
defer func() {
sc := -1
if page.Response().Response.Response != nil {
sc = page.Response().Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
for {
next, err := page.fn(ctx, page.nlr)
if err != nil {
return err
}
page.nlr = next
if !next.hasNextLink() || !next.IsEmpty() {
break
}
}
return nil
}
// Next advances to the next page of values. If there was an error making
// the request the page does not advance and the error is returned.
// Deprecated: Use NextWithContext() instead.
func (page *NamespaceListResultPage) Next() error {
return page.NextWithContext(context.Background())
}
// NotDone returns true if the page enumeration should be started or is not yet complete.
func (page NamespaceListResultPage) NotDone() bool {
return !page.nlr.IsEmpty()
}
// Response returns the raw server response from the last page request.
func (page NamespaceListResultPage) Response() NamespaceListResult {
return page.nlr
}
// Values returns the slice of values for the current page or nil if there are no values.
func (page NamespaceListResultPage) Values() []NamespaceResource {
if page.nlr.IsEmpty() {
return nil
}
return *page.nlr.Value
}
// Creates a new instance of the NamespaceListResultPage type.
func NewNamespaceListResultPage(getNextPage func(context.Context, NamespaceListResult) (NamespaceListResult, error)) NamespaceListResultPage {
return NamespaceListResultPage{fn: getNextPage}
}
// NamespacePatchParameters parameters supplied to the Patch Namespace operation.
type NamespacePatchParameters struct {
// Tags - Resource tags
Tags map[string]*string `json:"tags"`
// Sku - The sku of the created namespace
Sku *Sku `json:"sku,omitempty"`
}
// MarshalJSON is the custom marshaler for NamespacePatchParameters.
func (npp NamespacePatchParameters) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
if npp.Tags != nil {
objectMap["tags"] = npp.Tags
}
if npp.Sku != nil {
objectMap["sku"] = npp.Sku
}
return json.Marshal(objectMap)
}
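// Illustrative sketch (not part of the generated code): building patch
// parameters that update only tags and SKU. All values are placeholders, and
// the SkuName is passed as a plain string conversion so no particular enum
// constant name is assumed.
func exampleNamespacePatchParameters() NamespacePatchParameters {
    env := "staging"
    return NamespacePatchParameters{
        // Tag values are *string, so literals are hoisted into locals first.
        Tags: map[string]*string{"environment": &env},
        Sku:  &Sku{Name: SkuName("Standard")},
    }
}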
// NamespaceProperties namespace properties.
type NamespaceProperties struct {
// Name - The name of the namespace.
Name *string `json:"name,omitempty"`
// ProvisioningState - Provisioning state of the Namespace.
ProvisioningState *string `json:"provisioningState,omitempty"`
// Region - Specifies the targeted region in which the namespace should be created. It can be any of the following values: Australia East, Australia Southeast, Central US, East US, East US 2, West US, North Central US, South Central US, East Asia, Southeast Asia, Brazil South, Japan East, Japan West, North Europe, West Europe
Region *string `json:"region,omitempty"`
// Status - Status of the namespace. It can be any of these values: 1 = Created/Active, 2 = Creating, 3 = Suspended, 4 = Deleting
Status *string `json:"status,omitempty"`
// CreatedAt - The time the namespace was created.
CreatedAt *date.Time `json:"createdAt,omitempty"`
// ServiceBusEndpoint - Endpoint you can use to perform NotificationHub operations.
ServiceBusEndpoint *string `json:"serviceBusEndpoint,omitempty"`
// SubscriptionID - The Id of the Azure subscription associated with the namespace.
SubscriptionID *string `json:"subscriptionId,omitempty"`
// ScaleUnit - ScaleUnit where the namespace gets created
ScaleUnit *string `json:"scaleUnit,omitempty"`
// Enabled - Whether or not the namespace is currently enabled.
Enabled *bool `json:"enabled,omitempty"`
// Critical - Whether or not the namespace is set as Critical.
Critical *bool `json:"critical,omitempty"`
// NamespaceType - The namespace type. Possible values include: 'Messaging', 'NotificationHub'
NamespaceType NamespaceType `json:"namespaceType,omitempty"`
}
// NamespaceResource description of a Namespace resource.
type NamespaceResource struct {
autorest.Response `json:"-"`
// NamespaceProperties - Properties of the Namespace.
*NamespaceProperties `json:"properties,omitempty"`
// ID - READ-ONLY; Resource Id
ID *string `json:"id,omitempty"`
// Name - READ-ONLY; Resource name
Name *string `json:"name,omitempty"`
// Type - READ-ONLY; Resource type
Type *string `json:"type,omitempty"`
// Location - Resource location
Location *string `json:"location,omitempty"`
// Tags - Resource tags
Tags map[string]*string `json:"tags"`
// Sku - The sku of the created namespace
Sku *Sku `json:"sku,omitempty"`
}
// MarshalJSON is the custom marshaler for NamespaceResource.
func (nr NamespaceResource) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
if nr.NamespaceProperties != nil {
objectMap["properties"] = nr.NamespaceProperties
}
if nr.Location != nil {
objectMap["location"] = nr.Location
}
if nr.Tags != nil {
objectMap["tags"] = nr.Tags
}
if nr.Sku != nil {
objectMap["sku"] = nr.Sku
}
return json.Marshal(objectMap)
}
// UnmarshalJSON is the custom unmarshaler for NamespaceResource struct.
func (nr *NamespaceResource) UnmarshalJSON(body []byte) error {
var m map[string]*json.RawMessage
err := json.Unmarshal(body, &m)
if err != nil {
return err
}
for k, v := range m {
switch k {
case "properties":
if v != nil {
var namespaceProperties NamespaceProperties
err = json.Unmarshal(*v, &namespaceProperties)
if err != nil {
return err
}
nr.NamespaceProperties = &namespaceProperties
}
case "id":
if v != nil {
var ID string
err = json.Unmarshal(*v, &ID)
if err != nil {
return err
}
nr.ID = &ID
}
case "name":
if v != nil {
var name string
err = json.Unmarshal(*v, &name)
if err != nil {
return err
}
nr.Name = &name
}
case "type":
if v != nil {
var typeVar string
err = json.Unmarshal(*v, &typeVar)
if err != nil {
return err
}
nr.Type = &typeVar
}
case "location":
if v != nil {
var location string
err = json.Unmarshal(*v, &location)
if err != nil {
return err
}
nr.Location = &location
}
case "tags":
if v != nil {
var tags map[string]*string
err = json.Unmarshal(*v, &tags)
if err != nil {
return err
}
nr.Tags = tags
}
case "sku":
if v != nil {
var sku Sku
err = json.Unmarshal(*v, &sku)
if err != nil {
return err
}
nr.Sku = &sku
}
}
}
return nil
}
// NamespacesDeleteFuture an abstraction for monitoring and retrieving the results of a long-running operation.
type NamespacesDeleteFuture struct {
azure.Future
}
// Result returns the result of the asynchronous operation.
// If the operation has not completed it will return an error.
func (future *NamespacesDeleteFuture) Result(client NamespacesClient) (ar autorest.Response, err error) {
var done bool
done, err = future.DoneWithContext(context.Background(), client)
if err != nil {
err = autorest.NewErrorWithError(err, "notificationhubs.NamespacesDeleteFuture", "Result", future.Response(), "Polling failure")
return
}
if !done {
err = azure.NewAsyncOpIncompleteError("notificationhubs.NamespacesDeleteFuture")
return
}
ar.Response = future.Response()
return
}
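// Illustrative sketch (not part of the generated code): resolving a namespace
// delete future. The helper name is hypothetical; it assumes the
// NamespacesClient type from this package and passes the autorest.Client used
// for polling explicitly (typically the client's own embedded Client).
func exampleWaitForNamespaceDelete(ctx context.Context, poller autorest.Client, client NamespacesClient, future NamespacesDeleteFuture) error {
    // Block until the long-running delete reaches a terminal state.
    if err := future.WaitForCompletionRef(ctx, poller); err != nil {
        return err
    }
    // Result then surfaces the final HTTP response (or a polling failure).
    _, err := future.Result(client)
    return err
}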
// PnsCredentialsProperties description of a NotificationHub PNS Credentials.
type PnsCredentialsProperties struct {
// ApnsCredential - The ApnsCredential of the created NotificationHub
ApnsCredential *ApnsCredential `json:"apnsCredential,omitempty"`
// WnsCredential - The WnsCredential of the created NotificationHub
WnsCredential *WnsCredential `json:"wnsCredential,omitempty"`
// GcmCredential - The GcmCredential of the created NotificationHub
GcmCredential *GcmCredential `json:"gcmCredential,omitempty"`
// MpnsCredential - The MpnsCredential of the created NotificationHub
MpnsCredential *MpnsCredential `json:"mpnsCredential,omitempty"`
// AdmCredential - The AdmCredential of the created NotificationHub
AdmCredential *AdmCredential `json:"admCredential,omitempty"`
// BaiduCredential - The BaiduCredential of the created NotificationHub
BaiduCredential *BaiduCredential `json:"baiduCredential,omitempty"`
}
// PnsCredentialsResource description of a NotificationHub PNS Credentials.
type PnsCredentialsResource struct {
autorest.Response `json:"-"`
// PnsCredentialsProperties - NotificationHub PNS Credentials.
*PnsCredentialsProperties `json:"properties,omitempty"`
// ID - READ-ONLY; Resource Id
ID *string `json:"id,omitempty"`
// Name - READ-ONLY; Resource name
Name *string `json:"name,omitempty"`
// Type - READ-ONLY; Resource type
Type *string `json:"type,omitempty"`
// Location - Resource location
Location *string `json:"location,omitempty"`
// Tags - Resource tags
Tags map[string]*string `json:"tags"`
// Sku - The sku of the created namespace
Sku *Sku `json:"sku,omitempty"`
}
// MarshalJSON is the custom marshaler for PnsCredentialsResource.
func (pcr PnsCredentialsResource) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
if pcr.PnsCredentialsProperties != nil {
objectMap["properties"] = pcr.PnsCredentialsProperties
}
if pcr.Location != nil {
objectMap["location"] = pcr.Location
}
if pcr.Tags != nil {
objectMap["tags"] = pcr.Tags
}
if pcr.Sku != nil {
objectMap["sku"] = pcr.Sku
}
return json.Marshal(objectMap)
}
// UnmarshalJSON is the custom unmarshaler for PnsCredentialsResource struct.
func (pcr *PnsCredentialsResource) UnmarshalJSON(body []byte) error {
var m map[string]*json.RawMessage
err := json.Unmarshal(body, &m)
if err != nil {
return err
}
for k, v := range m {
switch k {
case "properties":
if v != nil {
var pnsCredentialsProperties PnsCredentialsProperties
err = json.Unmarshal(*v, &pnsCredentialsProperties)
if err != nil {
return err
}
pcr.PnsCredentialsProperties = &pnsCredentialsProperties
}
case "id":
if v != nil {
var ID string
err = json.Unmarshal(*v, &ID)
if err != nil {
return err
}
pcr.ID = &ID
}
case "name":
if v != nil {
var name string
err = json.Unmarshal(*v, &name)
if err != nil {
return err
}
pcr.Name = &name
}
case "type":
if v != nil {
var typeVar string
err = json.Unmarshal(*v, &typeVar)
if err != nil {
return err
}
pcr.Type = &typeVar
}
case "location":
if v != nil {
var location string
err = json.Unmarshal(*v, &location)
if err != nil {
return err
}
pcr.Location = &location
}
case "tags":
if v != nil {
var tags map[string]*string
err = json.Unmarshal(*v, &tags)
if err != nil {
return err
}
pcr.Tags = tags
}
case "sku":
if v != nil {
var sku Sku
err = json.Unmarshal(*v, &sku)
if err != nil {
return err
}
pcr.Sku = &sku
}
}
}
return nil
}
// PolicykeyResource namespace/NotificationHub Regenerate Keys
type PolicykeyResource struct {
// PolicyKey - Name of the key that has to be regenerated for the Namespace/Notification Hub Authorization Rule. The value can be Primary Key/Secondary Key.
PolicyKey *string `json:"policyKey,omitempty"`
}
// Properties notificationHub properties.
type Properties struct {
// Name - The NotificationHub name.
Name *string `json:"name,omitempty"`
// RegistrationTTL - The RegistrationTtl of the created NotificationHub
RegistrationTTL *string `json:"registrationTtl,omitempty"`
// AuthorizationRules - The AuthorizationRules of the created NotificationHub
AuthorizationRules *[]SharedAccessAuthorizationRuleProperties `json:"authorizationRules,omitempty"`
// ApnsCredential - The ApnsCredential of the created NotificationHub
ApnsCredential *ApnsCredential `json:"apnsCredential,omitempty"`
// WnsCredential - The WnsCredential of the created NotificationHub
WnsCredential *WnsCredential `json:"wnsCredential,omitempty"`
// GcmCredential - The GcmCredential of the created NotificationHub
GcmCredential *GcmCredential `json:"gcmCredential,omitempty"`
// MpnsCredential - The MpnsCredential of the created NotificationHub
MpnsCredential *MpnsCredential `json:"mpnsCredential,omitempty"`
// AdmCredential - The AdmCredential of the created NotificationHub
AdmCredential *AdmCredential `json:"admCredential,omitempty"`
// BaiduCredential - The BaiduCredential of the created NotificationHub
BaiduCredential *BaiduCredential `json:"baiduCredential,omitempty"`
}
// Resource ...
type Resource struct {
// ID - READ-ONLY; Resource Id
ID *string `json:"id,omitempty"`
// Name - READ-ONLY; Resource name
Name *string `json:"name,omitempty"`
// Type - READ-ONLY; Resource type
Type *string `json:"type,omitempty"`
// Location - Resource location
Location *string `json:"location,omitempty"`
// Tags - Resource tags
Tags map[string]*string `json:"tags"`
// Sku - The sku of the created namespace
Sku *Sku `json:"sku,omitempty"`
}
// MarshalJSON is the custom marshaler for Resource.
func (r Resource) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
if r.Location != nil {
objectMap["location"] = r.Location
}
if r.Tags != nil {
objectMap["tags"] = r.Tags
}
if r.Sku != nil {
objectMap["sku"] = r.Sku
}
return json.Marshal(objectMap)
}
// ResourceListKeys namespace/NotificationHub Connection String
type ResourceListKeys struct {
autorest.Response `json:"-"`
// PrimaryConnectionString - PrimaryConnectionString of the AuthorizationRule.
PrimaryConnectionString *string `json:"primaryConnectionString,omitempty"`
// SecondaryConnectionString - SecondaryConnectionString of the created AuthorizationRule
SecondaryConnectionString *string `json:"secondaryConnectionString,omitempty"`
// PrimaryKey - PrimaryKey of the created AuthorizationRule.
PrimaryKey *string `json:"primaryKey,omitempty"`
// SecondaryKey - SecondaryKey of the created AuthorizationRule
SecondaryKey *string `json:"secondaryKey,omitempty"`
// KeyName - KeyName of the created AuthorizationRule
KeyName *string `json:"keyName,omitempty"`
}
// ResourceType description of a NotificationHub Resource.
type ResourceType struct {
autorest.Response `json:"-"`
// Properties - Properties of the NotificationHub.
*Properties `json:"properties,omitempty"`
// ID - READ-ONLY; Resource Id
ID *string `json:"id,omitempty"`
// Name - READ-ONLY; Resource name
Name *string `json:"name,omitempty"`
// Type - READ-ONLY; Resource type
Type *string `json:"type,omitempty"`
// Location - Resource location
Location *string `json:"location,omitempty"`
// Tags - Resource tags
Tags map[string]*string `json:"tags"`
// Sku - The sku of the created namespace
Sku *Sku `json:"sku,omitempty"`
}
// MarshalJSON is the custom marshaler for ResourceType.
func (rt ResourceType) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
if rt.Properties != nil {
objectMap["properties"] = rt.Properties
}
if rt.Location != nil {
objectMap["location"] = rt.Location
}
if rt.Tags != nil {
objectMap["tags"] = rt.Tags
}
if rt.Sku != nil {
objectMap["sku"] = rt.Sku
}
return json.Marshal(objectMap)
}
// UnmarshalJSON is the custom unmarshaler for ResourceType struct.
func (rt *ResourceType) UnmarshalJSON(body []byte) error {
var m map[string]*json.RawMessage
err := json.Unmarshal(body, &m)
if err != nil {
return err
}
for k, v := range m {
switch k {
case "properties":
if v != nil {
var properties Properties
err = json.Unmarshal(*v, &properties)
if err != nil {
return err
}
rt.Properties = &properties
}
case "id":
if v != nil {
var ID string
err = json.Unmarshal(*v, &ID)
if err != nil {
return err
}
rt.ID = &ID
}
case "name":
if v != nil {
var name string
err = json.Unmarshal(*v, &name)
if err != nil {
return err
}
rt.Name = &name
}
case "type":
if v != nil {
var typeVar string
err = json.Unmarshal(*v, &typeVar)
if err != nil {
return err
}
rt.Type = &typeVar
}
case "location":
if v != nil {
var location string
err = json.Unmarshal(*v, &location)
if err != nil {
return err
}
rt.Location = &location
}
case "tags":
if v != nil {
var tags map[string]*string
err = json.Unmarshal(*v, &tags)
if err != nil {
return err
}
rt.Tags = tags
}
case "sku":
if v != nil {
var sku Sku
err = json.Unmarshal(*v, &sku)
if err != nil {
return err
}
rt.Sku = &sku
}
}
}
return nil
}
// SharedAccessAuthorizationRuleCreateOrUpdateParameters parameters supplied to the CreateOrUpdate Namespace
// AuthorizationRules.
type SharedAccessAuthorizationRuleCreateOrUpdateParameters struct {
// Properties - Properties of the Namespace AuthorizationRules.
Properties *SharedAccessAuthorizationRuleProperties `json:"properties,omitempty"`
// ID - READ-ONLY; Resource Id
ID *string `json:"id,omitempty"`
// Name - READ-ONLY; Resource name
Name *string `json:"name,omitempty"`
// Type - READ-ONLY; Resource type
Type *string `json:"type,omitempty"`
// Location - Resource location
Location *string `json:"location,omitempty"`
// Tags - Resource tags
Tags map[string]*string `json:"tags"`
// Sku - The sku of the created namespace
Sku *Sku `json:"sku,omitempty"`
}
// MarshalJSON is the custom marshaler for SharedAccessAuthorizationRuleCreateOrUpdateParameters.
func (saarcoup SharedAccessAuthorizationRuleCreateOrUpdateParameters) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
if saarcoup.Properties != nil {
objectMap["properties"] = saarcoup.Properties
}
if saarcoup.Location != nil {
objectMap["location"] = saarcoup.Location
}
if saarcoup.Tags != nil {
objectMap["tags"] = saarcoup.Tags
}
if saarcoup.Sku != nil {
objectMap["sku"] = saarcoup.Sku
}
return json.Marshal(objectMap)
}
// SharedAccessAuthorizationRuleListResult the response of the List Namespace operation.
type SharedAccessAuthorizationRuleListResult struct {
autorest.Response `json:"-"`
// Value - Result of the List AuthorizationRules operation.
Value *[]SharedAccessAuthorizationRuleResource `json:"value,omitempty"`
// NextLink - Link to the next set of results. Not empty if Value contains incomplete list of AuthorizationRules
NextLink *string `json:"nextLink,omitempty"`
}
// SharedAccessAuthorizationRuleListResultIterator provides access to a complete listing of
// SharedAccessAuthorizationRuleResource values.
type SharedAccessAuthorizationRuleListResultIterator struct {
i int
page SharedAccessAuthorizationRuleListResultPage
}
// NextWithContext advances to the next value. If there was an error making
// the request the iterator does not advance and the error is returned.
func (iter *SharedAccessAuthorizationRuleListResultIterator) NextWithContext(ctx context.Context) (err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/SharedAccessAuthorizationRuleListResultIterator.NextWithContext")
defer func() {
sc := -1
if iter.Response().Response.Response != nil {
sc = iter.Response().Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
iter.i++
if iter.i < len(iter.page.Values()) {
return nil
}
err = iter.page.NextWithContext(ctx)
if err != nil {
iter.i--
return err
}
iter.i = 0
return nil
}
// Next advances to the next value. If there was an error making
// the request the iterator does not advance and the error is returned.
// Deprecated: Use NextWithContext() instead.
func (iter *SharedAccessAuthorizationRuleListResultIterator) Next() error {
return iter.NextWithContext(context.Background())
}
// NotDone returns true if the enumeration should be started or is not yet complete.
func (iter SharedAccessAuthorizationRuleListResultIterator) NotDone() bool {
return iter.page.NotDone() && iter.i < len(iter.page.Values())
}
// Response returns the raw server response from the last page request.
func (iter SharedAccessAuthorizationRuleListResultIterator) Response() SharedAccessAuthorizationRuleListResult {
return iter.page.Response()
}
// Value returns the current value or a zero-initialized value if the
// iterator has advanced beyond the end of the collection.
func (iter SharedAccessAuthorizationRuleListResultIterator) Value() SharedAccessAuthorizationRuleResource {
if !iter.page.NotDone() {
return SharedAccessAuthorizationRuleResource{}
}
return iter.page.Values()[iter.i]
}
// Creates a new instance of the SharedAccessAuthorizationRuleListResultIterator type.
func NewSharedAccessAuthorizationRuleListResultIterator(page SharedAccessAuthorizationRuleListResultPage) SharedAccessAuthorizationRuleListResultIterator {
return SharedAccessAuthorizationRuleListResultIterator{page: page}
}
// IsEmpty returns true if the SharedAccessAuthorizationRuleListResult contains no values.
func (saarlr SharedAccessAuthorizationRuleListResult) IsEmpty() bool {
return saarlr.Value == nil || len(*saarlr.Value) == 0
}
// hasNextLink returns true if the NextLink is not empty.
func (saarlr SharedAccessAuthorizationRuleListResult) hasNextLink() bool {
return saarlr.NextLink != nil && len(*saarlr.NextLink) != 0
}
// sharedAccessAuthorizationRuleListResultPreparer prepares a request to retrieve the next set of results.
// It returns nil if no more results exist.
func (saarlr SharedAccessAuthorizationRuleListResult) sharedAccessAuthorizationRuleListResultPreparer(ctx context.Context) (*http.Request, error) {
if !saarlr.hasNextLink() {
return nil, nil
}
return autorest.Prepare((&http.Request{}).WithContext(ctx),
autorest.AsJSON(),
autorest.AsGet(),
autorest.WithBaseURL(to.String(saarlr.NextLink)))
}
// SharedAccessAuthorizationRuleListResultPage contains a page of SharedAccessAuthorizationRuleResource values.
type SharedAccessAuthorizationRuleListResultPage struct {
fn func(context.Context, SharedAccessAuthorizationRuleListResult) (SharedAccessAuthorizationRuleListResult, error)
saarlr SharedAccessAuthorizationRuleListResult
}
// NextWithContext advances to the next page of values. If there was an error making
// the request the page does not advance and the error is returned.
func (page *SharedAccessAuthorizationRuleListResultPage) NextWithContext(ctx context.Context) (err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/SharedAccessAuthorizationRuleListResultPage.NextWithContext")
defer func() {
sc := -1
if page.Response().Response.Response != nil {
sc = page.Response().Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
for {
next, err := page.fn(ctx, page.saarlr)
if err != nil {
return err
}
page.saarlr = next
if !next.hasNextLink() || !next.IsEmpty() {
break
}
}
return nil
}
// Next advances to the next page of values. If there was an error making
// the request the page does not advance and the error is returned.
// Deprecated: Use NextWithContext() instead.
func (page *SharedAccessAuthorizationRuleListResultPage) Next() error {
return page.NextWithContext(context.Background())
}
// NotDone returns true if the page enumeration should be started or is not yet complete.
func (page SharedAccessAuthorizationRuleListResultPage) NotDone() bool {
return !page.saarlr.IsEmpty()
}
// Response returns the raw server response from the last page request.
func (page SharedAccessAuthorizationRuleListResultPage) Response() SharedAccessAuthorizationRuleListResult {
return page.saarlr
}
// Values returns the slice of values for the current page or nil if there are no values.
func (page SharedAccessAuthorizationRuleListResultPage) Values() []SharedAccessAuthorizationRuleResource {
if page.saarlr.IsEmpty() {
return nil
}
return *page.saarlr.Value
}
// Creates a new instance of the SharedAccessAuthorizationRuleListResultPage type.
func NewSharedAccessAuthorizationRuleListResultPage(getNextPage func(context.Context, SharedAccessAuthorizationRuleListResult) (SharedAccessAuthorizationRuleListResult, error)) SharedAccessAuthorizationRuleListResultPage {
return SharedAccessAuthorizationRuleListResultPage{fn: getNextPage}
}
// SharedAccessAuthorizationRuleProperties sharedAccessAuthorizationRule properties.
type SharedAccessAuthorizationRuleProperties struct {
// Rights - The rights associated with the rule.
Rights *[]AccessRights `json:"rights,omitempty"`
}
// SharedAccessAuthorizationRuleResource description of a Namespace AuthorizationRules.
type SharedAccessAuthorizationRuleResource struct {
autorest.Response `json:"-"`
// SharedAccessAuthorizationRuleProperties - Properties of the Namespace AuthorizationRule.
*SharedAccessAuthorizationRuleProperties `json:"properties,omitempty"`
// ID - READ-ONLY; Resource Id
ID *string `json:"id,omitempty"`
// Name - READ-ONLY; Resource name
Name *string `json:"name,omitempty"`
// Type - READ-ONLY; Resource type
Type *string `json:"type,omitempty"`
// Location - Resource location
Location *string `json:"location,omitempty"`
// Tags - Resource tags
Tags map[string]*string `json:"tags"`
// Sku - The sku of the created namespace
Sku *Sku `json:"sku,omitempty"`
}
// MarshalJSON is the custom marshaler for SharedAccessAuthorizationRuleResource.
func (saarr SharedAccessAuthorizationRuleResource) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
if saarr.SharedAccessAuthorizationRuleProperties != nil {
objectMap["properties"] = saarr.SharedAccessAuthorizationRuleProperties
}
if saarr.Location != nil {
objectMap["location"] = saarr.Location
}
if saarr.Tags != nil {
objectMap["tags"] = saarr.Tags
}
if saarr.Sku != nil {
objectMap["sku"] = saarr.Sku
}
return json.Marshal(objectMap)
}
// UnmarshalJSON is the custom unmarshaler for SharedAccessAuthorizationRuleResource struct.
func (saarr *SharedAccessAuthorizationRuleResource) UnmarshalJSON(body []byte) error {
var m map[string]*json.RawMessage
err := json.Unmarshal(body, &m)
if err != nil {
return err
}
for k, v := range m {
switch k {
case "properties":
if v != nil {
var sharedAccessAuthorizationRuleProperties SharedAccessAuthorizationRuleProperties
err = json.Unmarshal(*v, &sharedAccessAuthorizationRuleProperties)
if err != nil {
return err
}
saarr.SharedAccessAuthorizationRuleProperties = &sharedAccessAuthorizationRuleProperties
}
case "id":
if v != nil {
var ID string
err = json.Unmarshal(*v, &ID)
if err != nil {
return err
}
saarr.ID = &ID
}
case "name":
if v != nil {
var name string
err = json.Unmarshal(*v, &name)
if err != nil {
return err
}
saarr.Name = &name
}
case "type":
if v != nil {
var typeVar string
err = json.Unmarshal(*v, &typeVar)
if err != nil {
return err
}
saarr.Type = &typeVar
}
case "location":
if v != nil {
var location string
err = json.Unmarshal(*v, &location)
if err != nil {
return err
}
saarr.Location = &location
}
case "tags":
if v != nil {
var tags map[string]*string
err = json.Unmarshal(*v, &tags)
if err != nil {
return err
}
saarr.Tags = tags
}
case "sku":
if v != nil {
var sku Sku
err = json.Unmarshal(*v, &sku)
if err != nil {
return err
}
saarr.Sku = &sku
}
}
}
return nil
}
// Sku the Sku description for a namespace
type Sku struct {
// Name - Name of the notification hub sku. Possible values include: 'Free', 'Basic', 'Standard'
Name SkuName `json:"name,omitempty"`
// Tier - The tier of particular sku
Tier *string `json:"tier,omitempty"`
// Size - The Sku size
Size *string `json:"size,omitempty"`
// Family - The Sku Family
Family *string `json:"family,omitempty"`
// Capacity - The capacity of the resource
Capacity *int32 `json:"capacity,omitempty"`
}
// SubResource ...
type SubResource struct {
// ID - Resource Id
ID *string `json:"id,omitempty"`
}
// WnsCredential description of a NotificationHub WnsCredential.
type WnsCredential struct {
// WnsCredentialProperties - Properties of NotificationHub WnsCredential.
*WnsCredentialProperties `json:"properties,omitempty"`
}
// MarshalJSON is the custom marshaler for WnsCredential.
func (wc WnsCredential) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
if wc.WnsCredentialProperties != nil {
objectMap["properties"] = wc.WnsCredentialProperties
}
return json.Marshal(objectMap)
}
// UnmarshalJSON is the custom unmarshaler for WnsCredential struct.
func (wc *WnsCredential) UnmarshalJSON(body []byte) error {
var m map[string]*json.RawMessage
err := json.Unmarshal(body, &m)
if err != nil {
return err
}
for k, v := range m {
switch k {
case "properties":
if v != nil {
var wnsCredentialProperties WnsCredentialProperties
err = json.Unmarshal(*v, &wnsCredentialProperties)
if err != nil {
return err
}
wc.WnsCredentialProperties = &wnsCredentialProperties
}
}
}
return nil
}
// WnsCredentialProperties description of a NotificationHub WnsCredential.
type WnsCredentialProperties struct {
// PackageSid - The package ID for this credential.
PackageSid *string `json:"packageSid,omitempty"`
// SecretKey - The secret key.
SecretKey *string `json:"secretKey,omitempty"`
// WindowsLiveEndpoint - The Windows Live endpoint.
WindowsLiveEndpoint *string `json:"windowsLiveEndpoint,omitempty"`
} | // NamespaceListResult the response of the List Namespace operation. |
api_op_ListMobileDeviceAccessOverrides.go | // Code generated by smithy-go-codegen DO NOT EDIT.
package workmail
import (
"context"
"fmt"
awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
"github.com/aws/aws-sdk-go-v2/aws/signer/v4"
"github.com/aws/aws-sdk-go-v2/service/workmail/types"
"github.com/aws/smithy-go/middleware"
smithyhttp "github.com/aws/smithy-go/transport/http"
)
// Lists all the mobile device access overrides for any given combination of
// WorkMail organization, user, or device.
func (c *Client) ListMobileDeviceAccessOverrides(ctx context.Context, params *ListMobileDeviceAccessOverridesInput, optFns ...func(*Options)) (*ListMobileDeviceAccessOverridesOutput, error) {
if params == nil {
params = &ListMobileDeviceAccessOverridesInput{}
}
result, metadata, err := c.invokeOperation(ctx, "ListMobileDeviceAccessOverrides", params, optFns, c.addOperationListMobileDeviceAccessOverridesMiddlewares)
if err != nil {
return nil, err
}
out := result.(*ListMobileDeviceAccessOverridesOutput)
out.ResultMetadata = metadata
return out, nil
}
type ListMobileDeviceAccessOverridesInput struct {
// The Amazon WorkMail organization under which to list mobile device access
// overrides.
//
// This member is required.
OrganizationId *string
// The mobile device to which the access override applies.
DeviceId *string
// The maximum number of results to return in a single call.
MaxResults *int32
// The token to use to retrieve the next page of results. The first call does not
// require a token.
NextToken *string
// The WorkMail user under which you list the mobile device access overrides.
// Accepts the following types of user identities:
//
// * User ID:
// 12345678-1234-1234-1234-123456789012 or
// S-1-1-12-1234567890-123456789-123456789-1234
//
// * Email address:
// [email protected]
//
// * User name: user
UserId *string
noSmithyDocumentSerde
}
type ListMobileDeviceAccessOverridesOutput struct {
// The token to use to retrieve the next page of results. The value is “null” when
// there are no more results to return.
NextToken *string
// The list of mobile device access overrides that exist for the specified Amazon
// WorkMail organization and user.
Overrides []types.MobileDeviceAccessOverride
// Metadata pertaining to the operation's result.
ResultMetadata middleware.Metadata
noSmithyDocumentSerde
}
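// Illustrative sketch (not part of the generated client): a single,
// unpaginated call listing the overrides for one user. The organization and
// user identifiers are placeholders, and the helper assumes a ready-to-use
// *Client plus only the context, fmt, and types imports already in this file.
func exampleListOverridesOnce(ctx context.Context, client *Client) ([]types.MobileDeviceAccessOverride, error) {
    orgID := "m-0123456789abcdef0123456789abcdef"
    userID := "user@example.com"
    out, err := client.ListMobileDeviceAccessOverrides(ctx, &ListMobileDeviceAccessOverridesInput{
        OrganizationId: &orgID,
        UserId:         &userID,
    })
    if err != nil {
        return nil, err
    }
    // A non-nil NextToken would mean more pages exist; the paginator defined
    // further down in this file handles that loop automatically.
    fmt.Println("overrides on first page:", len(out.Overrides))
    return out.Overrides, nil
}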
func (c *Client) addOperationListMobileDeviceAccessOverridesMiddlewares(stack *middleware.Stack, options Options) (err error) {
err = stack.Serialize.Add(&awsAwsjson11_serializeOpListMobileDeviceAccessOverrides{}, middleware.After)
if err != nil {
return err
}
err = stack.Deserialize.Add(&awsAwsjson11_deserializeOpListMobileDeviceAccessOverrides{}, middleware.After)
if err != nil {
return err
}
if err = addSetLoggerMiddleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddClientRequestIDMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddComputeContentLengthMiddleware(stack); err != nil {
return err
}
if err = addResolveEndpointMiddleware(stack, options); err != nil {
return err
}
if err = v4.AddComputePayloadSHA256Middleware(stack); err != nil {
return err
}
if err = addRetryMiddlewares(stack, options); err != nil {
return err
}
if err = addHTTPSignerV4Middleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddRawResponseToMetadata(stack); err != nil {
return err
}
if err = awsmiddleware.AddRecordResponseTiming(stack); err != nil {
return err
}
if err = addClientUserAgent(stack); err != nil {
return err
}
if err = smithyhttp.AddErrorCloseResponseBodyMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddCloseResponseBodyMiddleware(stack); err != nil {
return err
}
if err = addOpListMobileDeviceAccessOverridesValidationMiddleware(stack); err != nil {
return err
}
if err = stack.Initialize.Add(newServiceMetadataMiddleware_opListMobileDeviceAccessOverrides(options.Region), middleware.Before); err != nil {
return err
}
if err = addRequestIDRetrieverMiddleware(stack); err != nil {
return err
}
if err = addResponseErrorMiddleware(stack); err != nil {
return err
}
if err = addRequestResponseLogging(stack, options); err != nil {
return err
}
return nil
}
// ListMobileDeviceAccessOverridesAPIClient is a client that implements the
// ListMobileDeviceAccessOverrides operation.
type ListMobileDeviceAccessOverridesAPIClient interface {
ListMobileDeviceAccessOverrides(context.Context, *ListMobileDeviceAccessOverridesInput, ...func(*Options)) (*ListMobileDeviceAccessOverridesOutput, error)
}
var _ ListMobileDeviceAccessOverridesAPIClient = (*Client)(nil)
// ListMobileDeviceAccessOverridesPaginatorOptions is the paginator options for
// ListMobileDeviceAccessOverrides
type ListMobileDeviceAccessOverridesPaginatorOptions struct {
// The maximum number of results to return in a single call.
Limit int32
// Set to true if pagination should stop if the service returns a pagination token
// that matches the most recent token provided to the service.
StopOnDuplicateToken bool
}
// ListMobileDeviceAccessOverridesPaginator is a paginator for
// ListMobileDeviceAccessOverrides
type ListMobileDeviceAccessOverridesPaginator struct {
options ListMobileDeviceAccessOverridesPaginatorOptions
client ListMobileDeviceAccessOverridesAPIClient
params *ListMobileDeviceAccessOverridesInput
nextToken *string
firstPage bool
}
// NewListMobileDeviceAccessOverridesPaginator returns a new
// ListMobileDeviceAccessOverridesPaginator
func NewListMobileDeviceAccessOverridesPaginator(client ListMobileDeviceAccessOverridesAPIClient, params *ListMobileDeviceAccessOverridesInput, optFns ...func(*ListMobileDeviceAccessOverridesPaginatorOptions)) *ListMobileDeviceAccessOverridesPaginator {
if params == nil {
params = &ListMobileDeviceAccessOverridesInput{}
}
options := ListMobileDeviceAccessOverridesPaginatorOptions{}
if params.MaxResults != nil {
options.Limit = *params.MaxResults
}
for _, fn := range optFns {
fn(&options)
}
return &ListMobileDeviceAccessOverridesPaginator{
options: options,
client: client,
params: params,
firstPage: true,
}
}
// HasMorePages returns a boolean indicating whether more pages are available
func (p *ListMobileDeviceAccessOverridesPaginator) HasMorePages() bool {
return p.firstPage || p.nextToken != nil
}
// NextPage retrieves the next ListMobileDeviceAccessOverrides page.
func (p *ListMobileDeviceAccessOverridesPaginator) NextPage(ctx context.Context, optFns ...func(*Options)) (*ListMobileDeviceAccessOverridesOutput, error) {
if !p.HasMorePages() {
return nil, fmt.Errorf("no more pages available")
}
params := *p.params
params.NextToken = p.nextToken
var limit *int32
if p.options.Limit > 0 {
limit = &p.options.Limit
}
params.MaxResults = limit
result, err := p.client.ListMobileDeviceAccessOverrides(ctx, ¶ms, optFns...)
if err != nil {
return nil, err
}
p.firstPage = false
prevToken := p.nextToken
p.nextToken = result.NextToken
if p.options.StopOnDuplicateToken && prevToken != nil && p.nextToken != nil && *prevToken == *p.nextToken {
| eturn result, nil
}
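// Example (an editor's sketch, not part of the generated API): a typical caller drives the
// paginator above with a HasMorePages/NextPage loop. The aws.Config value `cfg`, the
// organization ID variable, and the error handling below are illustrative assumptions.
//
//	svc := workmail.NewFromConfig(cfg)
//	p := NewListMobileDeviceAccessOverridesPaginator(svc, &ListMobileDeviceAccessOverridesInput{
//		OrganizationId: &orgID,
//	})
//	var overrides []types.MobileDeviceAccessOverride
//	for p.HasMorePages() {
//		page, err := p.NextPage(context.TODO())
//		if err != nil {
//			return err
//		}
//		overrides = append(overrides, page.Overrides...)
//	}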
func newServiceMetadataMiddleware_opListMobileDeviceAccessOverrides(region string) *awsmiddleware.RegisterServiceMetadata {
return &awsmiddleware.RegisterServiceMetadata{
Region: region,
ServiceID: ServiceID,
SigningName: "workmail",
OperationName: "ListMobileDeviceAccessOverrides",
}
}
| p.nextToken = nil
}
r |
data_loader.py | # Copyright 2018 SciNet (https://github.com/eth-nn-physics/nn_physical_concepts)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cPickle
import gzip
import io
import numpy as np
def load(validation_size_p, file_name):
"""
Params:
validation_size_p: percentage of data to be used for validation
file_name (str): File containing the data
""" | states = np.array(states)
train_val_separation = int(len(data[0]) * (1 - validation_size_p / 100.))
training_data = [data[i][:train_val_separation] for i in [0, 1, 2]]
training_states = states[:train_val_separation]
validation_data = [data[i][train_val_separation:] for i in [0, 1, 2]]
validation_states = states[train_val_separation:]
f.close()
return (training_data, validation_data, training_states, validation_states, params) | f = gzip.open(io.data_path + file_name + ".plk.gz", 'rb')
data, states, params = cPickle.load(f) |
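# Editor's sketch (not part of the original file; the dataset name is an assumption):
#
#   training_data, validation_data, training_states, validation_states, params = load(5, "oscillator")
#
# reads "<io.data_path>oscillator.plk.gz" and keeps 5% of the samples for validation.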
test_apikey.py | import uuid
from ace.api.apikey import ApiKey
import pytest
key_value = "5dbc0b66-fcf7-4b98-a0f4-59e523dbba92"
key_name = "test"
key_description = "test description"
key_is_admin = False
@pytest.mark.parametrize(
"key",
[
ApiKey(api_key=key_value, name=key_name, description=key_description, is_admin=key_is_admin),
ApiKey(api_key=key_value, name=key_name, description=key_description),
ApiKey(api_key=key_value, name=key_name),
],
)
@pytest.mark.unit
def test_ApiKey(key):
model = key.to_model()
assert model.api_key == key.api_key
assert model.name == key.name
assert model.description == key.description
assert model.is_admin == key.is_admin
assert key == ApiKey(**key.to_model().dict())
assert key == ApiKey.from_dict(key.to_dict())
assert key == ApiKey.from_json(key.to_json())
@pytest.mark.unit
def test_invalid_ApiKey():
| with pytest.raises(TypeError):
ApiKey()
with pytest.raises(TypeError):
ApiKey(api_key=key_value)
with pytest.raises(TypeError):
ApiKey(api_key=key_value, name="") |
|
object_nav_task.py | # Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
from typing import Any, List, Optional
import attr
from cv2 import log
import numpy as np
from gym import spaces
from habitat.config import Config
from habitat.core.dataset import SceneState
from habitat.core.logging import logger
from habitat.core.registry import registry
from habitat.core.simulator import AgentState, Sensor, SensorTypes
from habitat.core.utils import not_none_validator
from habitat.tasks.nav.nav import (
NavigationEpisode,
NavigationGoal,
NavigationTask
)
try:
from habitat.datasets.object_nav.object_nav_dataset import (
ObjectNavDatasetV1,
)
except ImportError:
pass
task_cat2mpcat40 = [
3, # ('chair', 2, 0)
5, # ('table', 4, 1)
6, # ('picture', 5, 2)
7, # ('cabinet', 6, 3)
8, # ('cushion', 7, 4)
10, # ('sofa', 9, 5),
11, # ('bed', 10, 6)
13, # ('chest_of_drawers', 12, 7),
14, # ('plant', 13, 8)
15, # ('sink', 14, 9)
18, # ('toilet', 17, 10),
19, # ('stool', 18, 11),
20, # ('towel', 19, 12)
22, # ('tv_monitor', 21, 13)
23, # ('shower', 22, 14)
25, # ('bathtub', 24, 15)
26, # ('counter', 25, 16),
27, # ('fireplace', 26, 17),
33, # ('gym_equipment', 32, 18),
34, # ('seating', 33, 19),
38, # ('clothes', 37, 20),
43, # ('foodstuff', 42, 21),
44, # ('stationery', 43, 22),
45, # ('fruit', 44, 23),
46, # ('plaything', 45, 24),
47, # ('hand_tool', 46, 25),
48, # ('game_equipment', 47, 26),
49, # ('kitchenware', 48, 27)
]
mapping_mpcat40_to_goal21 = {
3: 1,
5: 2,
6: 3,
7: 4,
8: 5,
10: 6,
11: 7,
13: 8,
14: 9,
15: 10,
18: 11,
19: 12,
20: 13,
22: 14,
23: 15,
25: 16,
26: 17,
27: 18,
33: 19,
34: 20,
38: 21,
43: 22, # ('foodstuff', 42, task_cat: 21)
44: 28, # ('stationery', 43, task_cat: 22)
45: 26, # ('fruit', 44, task_cat: 23)
46: 25, # ('plaything', 45, task_cat: 24)
47: 24, # ('hand_tool', 46, task_cat: 25)
48: 23, # ('game_equipment', 47, task_cat: 26)
49: 27, # ('kitchenware', 48, task_cat: 27)
}
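# Editor's sketch (not part of the original file): the two tables above compose to translate a
# task category index into a goal21 id; the helper name below is hypothetical.
#
#   def task_category_to_goal21(task_cat_idx):
#       # task index -> mpcat40 id -> goal21 id
#       return mapping_mpcat40_to_goal21[task_cat2mpcat40[task_cat_idx]]
#
#   task_category_to_goal21(0)   # chair: mpcat40 id 3 -> goal21 id 1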
@attr.s(auto_attribs=True, kw_only=True)
class AgentStateSpec:
r"""Agent data specifications that capture states of agent and sensor in replay state.
"""
position: Optional[List[float]] = attr.ib(default=None)
rotation: Optional[List[float]] = attr.ib(default=None)
sensor_data: Optional[dict] = attr.ib(default=None)
@attr.s(auto_attribs=True, kw_only=True)
class ReplayActionSpec:
r"""Replay specifications that capture metadata associated with action.
"""
action: str = attr.ib(default=None, validator=not_none_validator)
agent_state: Optional[AgentStateSpec] = attr.ib(default=None)
@attr.s(auto_attribs=True, kw_only=True)
class ObjectGoalNavEpisode(NavigationEpisode):
r"""ObjectGoal Navigation Episode
:param object_category: Category of the object
"""
object_category: Optional[str] = None
reference_replay: Optional[List[ReplayActionSpec]] = None
scene_state: Optional[List[SceneState]] = None
is_thda: Optional[bool] = False
scene_dataset: Optional[str] = "mp3d"
@property
def goals_key(self) -> str:
r"""The key to retrieve the goals"""
return f"{os.path.basename(self.scene_id)}_{self.object_category}"
@attr.s(auto_attribs=True)
class ObjectViewLocation:
r"""ObjectViewLocation provides information about a position around an object goal
that is usually navigable and from which the object is visible with the specific
agent configuration used when the episode's dataset was created.
Args:
agent_state: navigable AgentState with a position and a rotation where
the object is visible.
iou: an intersection over union of the object and a rectangle in the
center of the view. This metric is used to evaluate how good the object
view is from the current position. A higher iou means a better view; iou equals
1.0 if the whole object is inside the rectangle and no pixel inside
the rectangle belongs to anything except the object.
"""
agent_state: AgentState
iou: Optional[float]
@attr.s(auto_attribs=True, kw_only=True)
class ObjectGoal(NavigationGoal):
r"""Object goal provides information about an object that is a target for
navigation. It can be specified by object_id, position and object
category. An important part for metrics calculation are the view points that
describe the success area for the navigation.
Args:
object_id: id that can be used to retrieve object from the semantic
scene annotation
object_name: name of the object
object_category: object category name usually similar to scene semantic
categories
room_id: id of the room where the object is located; can be used to retrieve
the room from the semantic scene annotation
room_name: name of the room where the object is located
view_points: navigable positions around the object with specified
proximity of the object surface used for navigation metrics calculation.
The object is visible from these positions.
"""
object_id: str = attr.ib(default=None, validator=not_none_validator)
object_name: Optional[str] = None
object_name_id: Optional[int] = None
object_category: Optional[str] = None
room_id: Optional[str] = None
room_name: Optional[str] = None
view_points: Optional[List[ObjectViewLocation]] = None
@registry.register_sensor
class ObjectGoalSensor(Sensor):
r"""A sensor for Object Goal specification, provided as an observation, which is used in
ObjectGoal Navigation. The goal is expected to be specified by object_id or
semantic category id.
For the agent in simulator the forward direction is along negative-z.
In polar coordinate format the angle returned is azimuth to the goal.
Args:
sim: a reference to the simulator for calculating task observations.
config: a config for the ObjectGoalSensor sensor. Can contain field
GOAL_SPEC that specifies which id use for goal specification,
GOAL_SPEC_MAX_VAL the maximum object_id possible used for
observation space definition.
dataset: a Object Goal navigation dataset that contains dictionaries
of categories id to text mapping.
"""
cls_uuid: str = "objectgoal"
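# Editor's sketch (illustrative; the YAML layout is an assumption, only the GOAL_SPEC and
# GOAL_SPEC_MAX_VAL keys come from the docstring above):
#
#   OBJECTGOAL_SENSOR:
#     GOAL_SPEC: "TASK_CATEGORY_ID"
#     GOAL_SPEC_MAX_VAL: 50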
def __init__(
self,
sim,
config: Config,
dataset: "ObjectNavDatasetV1",
*args: Any,
**kwargs: Any,
):
self._sim = sim
self._dataset = dataset
super().__init__(config=config)
def _get_uuid(self, *args: Any, **kwargs: Any) -> str:
return self.cls_uuid
def _get_sensor_type(self, *args: Any, **kwargs: Any):
return SensorTypes.SEMANTIC
def _get_observation_space(self, *args: Any, **kwargs: Any):
sensor_shape = (1,)
max_value = self.config.GOAL_SPEC_MAX_VAL - 1
if self.config.GOAL_SPEC == "TASK_CATEGORY_ID":
max_value = max(
self._dataset.category_to_task_category_id.values()
)
logger.info("max object cat: {}".format(max_value))
logger.info("cats: {}".format(self._dataset.category_to_task_category_id.values()))
return spaces.Box(
low=0, high=max_value, shape=sensor_shape, dtype=np.int64
)
def get_observation(
self,
observations,
*args: Any,
episode: ObjectGoalNavEpisode,
**kwargs: Any,
) -> Optional[int]:
if len(episode.goals) == 0:
logger.error(
f"No goal specified for episode {episode.episode_id}."
)
return None
if not isinstance(episode.goals[0], ObjectGoal):
logger.error(
f"First goal should be ObjectGoal, episode {episode.episode_id}."
)
return None
category_name = episode.object_category
if self.config.GOAL_SPEC == "TASK_CATEGORY_ID":
return np.array(
[self._dataset.category_to_task_category_id[category_name]],
dtype=np.int64,
)
elif self.config.GOAL_SPEC == "OBJECT_ID": | assert isinstance(obj_goal, ObjectGoal) # for type checking
return np.array([obj_goal.object_name_id], dtype=np.int64)
else:
raise RuntimeError(
"Wrong GOAL_SPEC specified for ObjectGoalSensor."
)
@registry.register_task(name="ObjectNav-v1")
class ObjectNavigationTask(NavigationTask):
r"""An Object Navigation Task class for a task specific methods.
Used to explicitly state a type of the task in config.
"""
_is_episode_active: bool
_prev_action: int
def __init__(self, **kwargs) -> None:
super().__init__(**kwargs)
self._is_episode_active = False
def overwrite_sim_config(self, sim_config, episode):
super().overwrite_sim_config(sim_config, episode)
sim_config.defrost()
sim_config.scene_state = episode.scene_state
sim_config.freeze()
return sim_config
def _check_episode_is_active(self, action, *args: Any, **kwargs: Any) -> bool:
return not getattr(self, "is_stop_called", False) | obj_goal = episode.goals[0] |
test_aio.rs | use libc::c_int;
use nix::{Error, Result};
use nix::errno::*;
use nix::sys::aio::*;
use nix::sys::signal::*;
use nix::sys::time::{TimeSpec, TimeValLike};
use std::io::{Write, Read, Seek, SeekFrom};
use std::ops::Deref;
use std::os::unix::io::AsRawFd;
use std::rc::Rc;
use std::sync::Mutex;
use std::sync::atomic::{AtomicBool, Ordering};
use std::{thread, time};
use tempfile::tempfile;
// Helper that polls an AioCb for completion or error
fn poll_aio(mut aiocb: &mut AioCb) -> Result<()> {
loop {
let err = aiocb.error();
if err != Err(Error::from(Errno::EINPROGRESS)) { return err; };
thread::sleep(time::Duration::from_millis(10));
}
}
// Tests AioCb.cancel. We aren't trying to test the OS's implementation, only our
// bindings. So it's sufficient to check that AioCb.cancel returned any
// AioCancelStat value.
#[test]
#[cfg_attr(all(target_env = "musl", target_arch = "x86_64"), ignore)]
fn test_cancel() {
let wbuf: &'static [u8] = b"CDEF";
let f = tempfile().unwrap();
let mut aiocb = AioCb::from_slice( f.as_raw_fd(),
0, //offset
&wbuf,
0, //priority
SigevNotify::SigevNone,
LioOpcode::LIO_NOP);
aiocb.write().unwrap();
let err = aiocb.error();
assert!(err == Ok(()) || err == Err(Error::from(Errno::EINPROGRESS)));
let cancelstat = aiocb.cancel();
assert!(cancelstat.is_ok());
// Wait for aiocb to complete, but don't care whether it succeeded
let _ = poll_aio(&mut aiocb);
let _ = aiocb.aio_return();
}
// Tests using aio_cancel_all for all outstanding IOs.
#[test]
#[cfg_attr(all(target_env = "musl", target_arch = "x86_64"), ignore)]
fn test_aio_cancel_all() {
let wbuf: &'static [u8] = b"CDEF";
let f = tempfile().unwrap();
let mut aiocb = AioCb::from_slice(f.as_raw_fd(),
0, //offset
&wbuf,
0, //priority
SigevNotify::SigevNone,
LioOpcode::LIO_NOP);
aiocb.write().unwrap();
let err = aiocb.error();
assert!(err == Ok(()) || err == Err(Error::from(Errno::EINPROGRESS)));
let cancelstat = aio_cancel_all(f.as_raw_fd());
assert!(cancelstat.is_ok());
// Wait for aiocb to complete, but don't care whether it succeeded
let _ = poll_aio(&mut aiocb);
let _ = aiocb.aio_return();
}
#[test]
#[cfg_attr(all(target_env = "musl", target_arch = "x86_64"), ignore)]
fn test_fsync() {
const INITIAL: &'static [u8] = b"abcdef123456";
let mut f = tempfile().unwrap();
f.write(INITIAL).unwrap();
let mut aiocb = AioCb::from_fd( f.as_raw_fd(),
0, //priority
SigevNotify::SigevNone);
let err = aiocb.fsync(AioFsyncMode::O_SYNC);
assert!(err.is_ok());
poll_aio(&mut aiocb).unwrap();
aiocb.aio_return().unwrap();
}
#[test]
#[cfg_attr(all(target_env = "musl", target_arch = "x86_64"), ignore)]
fn test_aio_suspend() {
const INITIAL: &'static [u8] = b"abcdef123456";
const WBUF: &'static [u8] = b"CDEF";
let timeout = TimeSpec::seconds(10);
let rbuf = Rc::new(vec![0; 4].into_boxed_slice());
let mut f = tempfile().unwrap();
f.write(INITIAL).unwrap();
let mut wcb = AioCb::from_slice( f.as_raw_fd(),
2, //offset
&mut WBUF,
0, //priority
SigevNotify::SigevNone,
LioOpcode::LIO_WRITE);
let mut rcb = AioCb::from_boxed_slice( f.as_raw_fd(),
8, //offset
rbuf.clone(),
0, //priority
SigevNotify::SigevNone,
LioOpcode::LIO_READ);
wcb.write().unwrap();
rcb.read().unwrap();
loop {
{
let cbbuf = [&wcb, &rcb];
assert!(aio_suspend(&cbbuf[..], Some(timeout)).is_ok());
}
if rcb.error() != Err(Error::from(Errno::EINPROGRESS)) &&
wcb.error() != Err(Error::from(Errno::EINPROGRESS)) {
break
}
}
assert!(wcb.aio_return().unwrap() as usize == WBUF.len());
assert!(rcb.aio_return().unwrap() as usize == WBUF.len());
}
// Test a simple aio operation with no completion notification. We must poll
// for completion
#[test]
#[cfg_attr(all(target_env = "musl", target_arch = "x86_64"), ignore)]
fn test_read() {
const INITIAL: &'static [u8] = b"abcdef123456";
let rbuf = Rc::new(vec![0; 4].into_boxed_slice());
const EXPECT: &'static [u8] = b"cdef";
let mut f = tempfile().unwrap();
f.write(INITIAL).unwrap();
{
let mut aiocb = AioCb::from_boxed_slice( f.as_raw_fd(),
2, //offset
rbuf.clone(),
0, //priority
SigevNotify::SigevNone,
LioOpcode::LIO_NOP);
aiocb.read().unwrap();
let err = poll_aio(&mut aiocb);
assert!(err == Ok(()));
assert!(aiocb.aio_return().unwrap() as usize == EXPECT.len());
}
assert!(EXPECT == rbuf.deref().deref());
}
// Tests from_mut_slice
#[test]
#[cfg_attr(all(target_env = "musl", target_arch = "x86_64"), ignore)]
fn test_read_into_mut_slice() {
const INITIAL: &'static [u8] = b"abcdef123456";
let mut rbuf = vec![0; 4];
const EXPECT: &'static [u8] = b"cdef";
let mut f = tempfile().unwrap();
f.write(INITIAL).unwrap();
{
let mut aiocb = AioCb::from_mut_slice( f.as_raw_fd(),
2, //offset
&mut rbuf,
0, //priority
SigevNotify::SigevNone,
LioOpcode::LIO_NOP);
aiocb.read().unwrap();
let err = poll_aio(&mut aiocb);
assert!(err == Ok(()));
assert!(aiocb.aio_return().unwrap() as usize == EXPECT.len()); | }
// Test reading into an immutable buffer. It should fail
// FIXME: This test fails to panic on Linux/musl
#[test]
#[should_panic(expected = "Can't read into an immutable buffer")]
#[cfg_attr(target_env = "musl", ignore)]
fn test_read_immutable_buffer() {
let rbuf: &'static [u8] = b"CDEF";
let f = tempfile().unwrap();
let mut aiocb = AioCb::from_slice( f.as_raw_fd(),
2, //offset
&rbuf,
0, //priority
SigevNotify::SigevNone,
LioOpcode::LIO_NOP);
aiocb.read().unwrap();
}
// Test a simple aio operation with no completion notification. We must poll
// for completion. Unlike test_aio_read, this test uses AioCb::from_slice
#[test]
#[cfg_attr(all(target_env = "musl", target_arch = "x86_64"), ignore)]
fn test_write() {
const INITIAL: &'static [u8] = b"abcdef123456";
let wbuf = "CDEF".to_string().into_bytes();
let mut rbuf = Vec::new();
const EXPECT: &'static [u8] = b"abCDEF123456";
let mut f = tempfile().unwrap();
f.write(INITIAL).unwrap();
let mut aiocb = AioCb::from_slice( f.as_raw_fd(),
2, //offset
&wbuf,
0, //priority
SigevNotify::SigevNone,
LioOpcode::LIO_NOP);
aiocb.write().unwrap();
let err = poll_aio(&mut aiocb);
assert!(err == Ok(()));
assert!(aiocb.aio_return().unwrap() as usize == wbuf.len());
f.seek(SeekFrom::Start(0)).unwrap();
let len = f.read_to_end(&mut rbuf).unwrap();
assert!(len == EXPECT.len());
assert!(rbuf == EXPECT);
}
lazy_static! {
pub static ref SIGNALED: AtomicBool = AtomicBool::new(false);
// protects access to SIGUSR2 handlers, not just SIGNALED
pub static ref SIGUSR2_MTX: Mutex<()> = Mutex::new(());
}
extern fn sigfunc(_: c_int) {
SIGNALED.store(true, Ordering::Relaxed);
}
// Test an aio operation with completion delivered by a signal
// FIXME: This test is ignored on mips because of failures in qemu in CI
#[test]
#[cfg_attr(any(all(target_env = "musl", target_arch = "x86_64"), target_arch = "mips"), ignore)]
fn test_write_sigev_signal() {
let _ = SIGUSR2_MTX.lock().expect("Mutex got poisoned by another test");
let sa = SigAction::new(SigHandler::Handler(sigfunc),
SA_RESETHAND,
SigSet::empty());
SIGNALED.store(false, Ordering::Relaxed);
unsafe { sigaction(Signal::SIGUSR2, &sa) }.unwrap();
const INITIAL: &'static [u8] = b"abcdef123456";
const WBUF: &'static [u8] = b"CDEF";
let mut rbuf = Vec::new();
const EXPECT: &'static [u8] = b"abCDEF123456";
let mut f = tempfile().unwrap();
f.write(INITIAL).unwrap();
let mut aiocb = AioCb::from_slice( f.as_raw_fd(),
2, //offset
&WBUF,
0, //priority
SigevNotify::SigevSignal {
signal: Signal::SIGUSR2,
si_value: 0 //TODO: validate in sigfunc
},
LioOpcode::LIO_NOP);
aiocb.write().unwrap();
while SIGNALED.load(Ordering::Relaxed) == false {
thread::sleep(time::Duration::from_millis(10));
}
assert!(aiocb.aio_return().unwrap() as usize == WBUF.len());
f.seek(SeekFrom::Start(0)).unwrap();
let len = f.read_to_end(&mut rbuf).unwrap();
assert!(len == EXPECT.len());
assert!(rbuf == EXPECT);
}
// Test lio_listio with LIO_WAIT, so all AIO ops should be complete by the time
// lio_listio returns.
#[test]
#[cfg(not(any(target_os = "ios", target_os = "macos")))]
#[cfg_attr(all(target_env = "musl", target_arch = "x86_64"), ignore)]
fn test_lio_listio_wait() {
const INITIAL: &'static [u8] = b"abcdef123456";
const WBUF: &'static [u8] = b"CDEF";
let rbuf = Rc::new(vec![0; 4].into_boxed_slice());
let mut rbuf2 = Vec::new();
const EXPECT: &'static [u8] = b"abCDEF123456";
let mut f = tempfile().unwrap();
f.write(INITIAL).unwrap();
{
let mut wcb = AioCb::from_slice( f.as_raw_fd(),
2, //offset
&WBUF,
0, //priority
SigevNotify::SigevNone,
LioOpcode::LIO_WRITE);
let mut rcb = AioCb::from_boxed_slice( f.as_raw_fd(),
8, //offset
rbuf.clone(),
0, //priority
SigevNotify::SigevNone,
LioOpcode::LIO_READ);
let err = lio_listio(LioMode::LIO_WAIT, &[&mut wcb, &mut rcb], SigevNotify::SigevNone);
err.expect("lio_listio failed");
assert!(wcb.aio_return().unwrap() as usize == WBUF.len());
assert!(rcb.aio_return().unwrap() as usize == WBUF.len());
}
assert!(rbuf.deref().deref() == b"3456");
f.seek(SeekFrom::Start(0)).unwrap();
let len = f.read_to_end(&mut rbuf2).unwrap();
assert!(len == EXPECT.len());
assert!(rbuf2 == EXPECT);
}
// Test lio_listio with LIO_NOWAIT and no SigEvent, so we must use some other
// mechanism to check for the individual AioCb's completion.
#[test]
#[cfg(not(any(target_os = "ios", target_os = "macos")))]
#[cfg_attr(all(target_env = "musl", target_arch = "x86_64"), ignore)]
fn test_lio_listio_nowait() {
const INITIAL: &'static [u8] = b"abcdef123456";
const WBUF: &'static [u8] = b"CDEF";
let rbuf = Rc::new(vec![0; 4].into_boxed_slice());
let mut rbuf2 = Vec::new();
const EXPECT: &'static [u8] = b"abCDEF123456";
let mut f = tempfile().unwrap();
f.write(INITIAL).unwrap();
{
let mut wcb = AioCb::from_slice( f.as_raw_fd(),
2, //offset
&WBUF,
0, //priority
SigevNotify::SigevNone,
LioOpcode::LIO_WRITE);
let mut rcb = AioCb::from_boxed_slice( f.as_raw_fd(),
8, //offset
rbuf.clone(),
0, //priority
SigevNotify::SigevNone,
LioOpcode::LIO_READ);
let err = lio_listio(LioMode::LIO_NOWAIT, &[&mut wcb, &mut rcb], SigevNotify::SigevNone);
err.expect("lio_listio failed");
poll_aio(&mut wcb).unwrap();
poll_aio(&mut rcb).unwrap();
assert!(wcb.aio_return().unwrap() as usize == WBUF.len());
assert!(rcb.aio_return().unwrap() as usize == WBUF.len());
}
assert!(rbuf.deref().deref() == b"3456");
f.seek(SeekFrom::Start(0)).unwrap();
let len = f.read_to_end(&mut rbuf2).unwrap();
assert!(len == EXPECT.len());
assert!(rbuf2 == EXPECT);
}
// Test lio_listio with LIO_NOWAIT and a SigEvent to indicate when all AioCb's
// are complete.
// FIXME: This test is ignored on mips because of failures in qemu in CI.
#[test]
#[cfg(not(any(target_os = "ios", target_os = "macos")))]
#[cfg_attr(any(target_arch = "mips", target_env = "musl"), ignore)]
fn test_lio_listio_signal() {
let _ = SIGUSR2_MTX.lock().expect("Mutex got poisoned by another test");
const INITIAL: &'static [u8] = b"abcdef123456";
const WBUF: &'static [u8] = b"CDEF";
let rbuf = Rc::new(vec![0; 4].into_boxed_slice());
let mut rbuf2 = Vec::new();
const EXPECT: &'static [u8] = b"abCDEF123456";
let mut f = tempfile().unwrap();
let sa = SigAction::new(SigHandler::Handler(sigfunc),
SA_RESETHAND,
SigSet::empty());
let sigev_notify = SigevNotify::SigevSignal { signal: Signal::SIGUSR2,
si_value: 0 };
f.write(INITIAL).unwrap();
{
let mut wcb = AioCb::from_slice( f.as_raw_fd(),
2, //offset
&WBUF,
0, //priority
SigevNotify::SigevNone,
LioOpcode::LIO_WRITE);
let mut rcb = AioCb::from_boxed_slice( f.as_raw_fd(),
8, //offset
rbuf.clone(),
0, //priority
SigevNotify::SigevNone,
LioOpcode::LIO_READ);
SIGNALED.store(false, Ordering::Relaxed);
unsafe { sigaction(Signal::SIGUSR2, &sa) }.unwrap();
let err = lio_listio(LioMode::LIO_NOWAIT, &[&mut wcb, &mut rcb], sigev_notify);
err.expect("lio_listio failed");
while SIGNALED.load(Ordering::Relaxed) == false {
thread::sleep(time::Duration::from_millis(10));
}
assert!(wcb.aio_return().unwrap() as usize == WBUF.len());
assert!(rcb.aio_return().unwrap() as usize == WBUF.len());
}
assert!(rbuf.deref().deref() == b"3456");
f.seek(SeekFrom::Start(0)).unwrap();
let len = f.read_to_end(&mut rbuf2).unwrap();
assert!(len == EXPECT.len());
assert!(rbuf2 == EXPECT);
}
// Try to use lio_listio to read into an immutable buffer. It should fail
// FIXME: This test fails to panic on Linux/musl
#[test]
#[cfg(not(any(target_os = "ios", target_os = "macos")))]
#[should_panic(expected = "Can't read into an immutable buffer")]
#[cfg_attr(target_env = "musl", ignore)]
fn test_lio_listio_read_immutable() {
let rbuf: &'static [u8] = b"abcd";
let f = tempfile().unwrap();
let mut rcb = AioCb::from_slice( f.as_raw_fd(),
2, //offset
&rbuf,
0, //priority
SigevNotify::SigevNone,
LioOpcode::LIO_READ);
let _ = lio_listio(LioMode::LIO_NOWAIT, &[&mut rcb], SigevNotify::SigevNone);
}
// Test dropping an AioCb that hasn't yet finished. Behind the scenes, the
// library should wait for the AioCb's completion.
#[test]
#[cfg_attr(all(target_env = "musl", target_arch = "x86_64"), ignore)]
fn test_drop() {
const INITIAL: &'static [u8] = b"abcdef123456";
const WBUF: &'static [u8] = b"CDEF"; //"CDEF".to_string().into_bytes();
let mut rbuf = Vec::new();
const EXPECT: &'static [u8] = b"abCDEF123456";
let mut f = tempfile().unwrap();
f.write(INITIAL).unwrap();
{
let mut aiocb = AioCb::from_slice( f.as_raw_fd(),
2, //offset
&WBUF,
0, //priority
SigevNotify::SigevNone,
LioOpcode::LIO_NOP);
aiocb.write().unwrap();
}
f.seek(SeekFrom::Start(0)).unwrap();
let len = f.read_to_end(&mut rbuf).unwrap();
assert!(len == EXPECT.len());
assert!(rbuf == EXPECT);
} | }
assert!(rbuf == EXPECT); |
update.py | pkgname = "libXrandr" |
||
main_verbq_working.py | import torch
#from imsitu_encoder_verbq import imsitu_encoder
from imsitu_encoder_roleqverbq_embdhz import imsitu_encoder
from imsitu_loader import imsitu_loader_roleq_updated
from imsitu_scorer_log import imsitu_scorer
import json
import model_verbq_working
import os
import utils
import time
import random
#from torchviz import make_dot
#from graphviz import Digraph
def train(model, train_loader, dev_loader, traindev_loader, optimizer, scheduler, max_epoch, model_dir, encoder, gpu_mode, clip_norm, lr_max, model_name, args,eval_frequency=4):
model.train()
train_loss = 0
total_steps = 0
print_freq = 400
dev_score_list = []
time_all = time.time()
if model.gpu_mode >= 0 :
ngpus = 2
device_array = [i for i in range(0,ngpus)]
pmodel = torch.nn.DataParallel(model, device_ids=device_array)
else:
pmodel = model
#pmodel = model
'''if scheduler.get_lr()[0] < lr_max:
scheduler.step()'''
top1 = imsitu_scorer(encoder, 1, 3)
top5 = imsitu_scorer(encoder, 5, 3)
'''print('init param data check :')
for f in model.parameters():
if f.requires_grad:
print(f.data.size())'''
for epoch in range(max_epoch):
#print('current sample : ', i, img.size(), verb.size(), roles.size(), labels.size())
#sizes batch_size*3*height*width, batch*504*1, batch*6*190*1, batch*3*6*lebale_count*1
mx = len(train_loader)
for i, (id, img, verb, labels) in enumerate(train_loader):
#print("epoch{}-{}/{} batches\r".format(epoch,i+1,mx)) ,
t0 = time.time()
t1 = time.time()
total_steps += 1
if gpu_mode >= 0:
img = torch.autograd.Variable(img.cuda())
verb = torch.autograd.Variable(verb.cuda())
labels = torch.autograd.Variable(labels.cuda())
else:
img = torch.autograd.Variable(img)
verb = torch.autograd.Variable(verb)
labels = torch.autograd.Variable(labels)
'''print('all inputs')
print(img)
print('=========================================================================')
print(verb)
print('=========================================================================')
print(roles)
print('=========================================================================')
print(labels)'''
verb_predict, loss = pmodel(img, verb, labels)
#verb_predict, rol1pred, role_predict = pmodel.forward_eval5(img)
#print ("forward time = {}".format(time.time() - t1))
t1 = time.time()
'''g = make_dot(verb_predict, model.state_dict())
g.view()'''
#loss = model.calculate_loss(verb_predict, verb)
#loss = model.calculate_eval_loss_new(verb_predict, verb, rol1pred, labels, args)
#loss = loss_ * random.random() #try random loss
#print ("loss time = {}".format(time.time() - t1))
t1 = time.time()
#print('current loss = ', loss)
if gpu_mode >= 0 :
#loss.backward(torch.ones([2,1]).to(torch.device('cuda')))
loss.mean().backward()
else:
loss.backward()
#loss.backward()
#print ("backward time = {}".format(time.time() - t1))
torch.nn.utils.clip_grad_norm_(model.parameters(), clip_norm)
'''for param in filter(lambda p: p.requires_grad,model.parameters()):
print(param.grad.data.sum())'''
#start debugger
#import pdb; pdb.set_trace()
optimizer.step()
'''print('grad check after:')
for f in model.conv.parameters():
print('data is')
print(f.data [0][0])
#print('grad is')
#print(f.grad[0][0].item())
break'''
optimizer.zero_grad()
train_loss += float(loss.mean())
#top1.add_point_eval5(verb_predict, verb, role_predict, labels)
#top5.add_point_eval5(verb_predict, verb, role_predict, labels)
top1.add_point_verb_only_eval(id, verb_predict, verb)
top5.add_point_verb_only_eval(id, verb_predict, verb)
if total_steps % print_freq == 0:
top1_a = top1.get_average_results()
top5_a = top5.get_average_results()
print ("{},{},{}, {} , {}, loss = {:.2f}, avg loss = {:.2f}"
.format(total_steps-1,epoch,i, utils.format_dict(top1_a, "{:.2f}", "1-"),
utils.format_dict(top5_a,"{:.2f}","5-"), loss.mean().item(),
train_loss / ((total_steps-1)%eval_frequency) ))
if total_steps % eval_frequency == 0:
top1, top5, val_loss = eval(model, dev_loader, encoder, gpu_mode)
model.train()
top1_avg = top1.get_average_results()
top5_avg = top5.get_average_results()
avg_score = top1_avg["verb"] + top1_avg["value"] + top1_avg["value-all"] + top5_avg["verb"] + \
top5_avg["value"] + top5_avg["value-all"]
avg_score /= 8
print ('Dev {} average :{:.2f} {} {}'.format(total_steps-1, avg_score*100,
utils.format_dict(top1_avg,'{:.2f}', '1-'),
utils.format_dict(top5_avg, '{:.2f}', '5-')))
#print('Dev loss :', val_loss)
dev_score_list.append(avg_score)
max_score = max(dev_score_list)
if max_score == dev_score_list[-1]:
torch.save(model.state_dict(), model_dir + "/{}_verbq_iter0_change.model".format( model_name))
print ('New best model saved! {0}'.format(max_score))
#eval on the trainset
'''top1, top5, val_loss = eval(model, traindev_loader, encoder, gpu_mode)
model.train()
top1_avg = top1.get_average_results()
top5_avg = top5.get_average_results()
avg_score = top1_avg["verb"] + top1_avg["value"] + top1_avg["value-all"] + top5_avg["verb"] + \
top5_avg["value"] + top5_avg["value-all"] + top5_avg["value*"] + top5_avg["value-all*"]
avg_score /= 8
print ('TRAINDEV {} average :{:.2f} {} {}'.format(total_steps-1, avg_score*100,
utils.format_dict(top1_avg,'{:.2f}', '1-'),
utils.format_dict(top5_avg, '{:.2f}', '5-')))'''
print('current train loss', train_loss)
train_loss = 0
top1 = imsitu_scorer(encoder, 1, 3)
top5 = imsitu_scorer(encoder, 5, 3)
del verb_predict, loss, img, verb, labels
#break
print('Epoch ', epoch, ' completed!')
scheduler.step()
#break
def eval(model, dev_loader, encoder, gpu_mode, write_to_file = False):
model.eval()
val_loss = 0
print ('evaluating model...')
top1 = imsitu_scorer(encoder, 1, 3, write_to_file)
top5 = imsitu_scorer(encoder, 5, 3)
with torch.no_grad():
mx = len(dev_loader)
for i, (img_id, img, verb, labels) in enumerate(dev_loader):
#print("{}/{} batches\r".format(i+1,mx)) ,
'''im_data = torch.squeeze(im_data,0)
im_info = torch.squeeze(im_info,0)
gt_boxes = torch.squeeze(gt_boxes,0)
num_boxes = torch.squeeze(num_boxes,0)
verb = torch.squeeze(verb,0)
roles = torch.squeeze(roles,0)
labels = torch.squeeze(labels,0)'''
if gpu_mode >= 0:
img = torch.autograd.Variable(img.cuda())
verb = torch.autograd.Variable(verb.cuda())
labels = torch.autograd.Variable(labels.cuda())
else:
img = torch.autograd.Variable(img)
verb = torch.autograd.Variable(verb)
labels = torch.autograd.Variable(labels)
verb_predict, _= model(img, verb, labels)
'''loss = model.calculate_eval_loss(verb_predict, verb, role_predict, labels)
val_loss += loss.item()'''
top1.add_point_verb_only_eval(img_id, verb_predict, verb)
top5.add_point_verb_only_eval(img_id, verb_predict, verb)
del img, verb, labels
break
#return top1, top5, val_loss/mx
return top1, top5, 0
def main():
|
if __name__ == "__main__":
main()
| import argparse
parser = argparse.ArgumentParser(description="imsitu VSRL. Training, evaluation and prediction.")
parser.add_argument("--gpuid", default=-1, help="put GPU id > -1 in GPU mode", type=int)
#parser.add_argument("--command", choices = ["train", "eval", "resume", 'predict'], required = True)
parser.add_argument('--resume_training', action='store_true', help='Resume training from the model [resume_model]')
parser.add_argument('--resume_model', type=str, default='', help='The model we resume')
parser.add_argument('--verb_module', type=str, default='', help='pretrained verb module')
parser.add_argument('--role_module', type=str, default='', help='pretrained role module')
parser.add_argument('--train_role', action='store_true', help='cnn fix, verb fix, role train from the scratch')
parser.add_argument('--finetune_verb', action='store_true', help='cnn fix, verb finetune, role train from the scratch')
parser.add_argument('--finetune_cnn', action='store_true', help='cnn finetune, verb finetune, role train from the scratch')
parser.add_argument('--output_dir', type=str, default='./trained_models', help='Location to output the model')
parser.add_argument('--evaluate', action='store_true', help='Only use the testing mode')
parser.add_argument('--test', action='store_true', help='Only use the testing mode')
parser.add_argument('--dataset_folder', type=str, default='./imSitu', help='Location of annotations')
parser.add_argument('--imgset_dir', type=str, default='./resized_256', help='Location of original images')
parser.add_argument('--frcnn_feat_dir', type=str, help='Location of output from detectron')
#todo: train role module separately with gt verbs
args = parser.parse_args()
batch_size = 640
#lr = 5e-6
lr = 0.0001
lr_max = 5e-4
lr_gamma = 0.1
lr_step = 15
clip_norm = 0.5
weight_decay = 1e-4
n_epoch = 500
n_worker = 3
#dataset_folder = 'imSitu'
#imgset_folder = 'resized_256'
dataset_folder = args.dataset_folder
imgset_folder = args.imgset_dir
print('model spec :, top down att with role q ')
train_set = json.load(open(dataset_folder + "/updated_train_new.json"))
imsitu_roleq = json.load(open("imsitu_data/imsitu_questions_prev.json"))
verb_templates = json.load(open("imsitu_data/verb_questions_template_new.json"))
encoder = imsitu_encoder(train_set, imsitu_roleq, verb_templates)
model = model_verbq_working.BaseModel(encoder, args.gpuid)
# To group up the features
#cnn_features, role_features = utils.group_features_noun(model)
cnn_features, role_features = utils.group_features_noun(model)
train_set = imsitu_loader_roleq_updated(imgset_folder, train_set, encoder, model.train_preprocess())
train_loader = torch.utils.data.DataLoader(train_set, batch_size=4, shuffle=True, num_workers=n_worker)
dev_set = json.load(open(dataset_folder +"/dev.json"))
dev_set = imsitu_loader_roleq_updated(imgset_folder, dev_set, encoder, model.dev_preprocess())
dev_loader = torch.utils.data.DataLoader(dev_set, batch_size=4, shuffle=True, num_workers=n_worker)
test_set = json.load(open(dataset_folder +"/test.json"))
test_set = imsitu_loader_roleq_updated(imgset_folder, test_set, encoder, model.dev_preprocess())
test_loader = torch.utils.data.DataLoader(test_set, batch_size=64, shuffle=True, num_workers=n_worker)
traindev_set = json.load(open(dataset_folder +"/dev.json"))
traindev_set = imsitu_loader_roleq_updated(imgset_folder, traindev_set, encoder, model.dev_preprocess())
traindev_loader = torch.utils.data.DataLoader(traindev_set, batch_size=8, shuffle=True, num_workers=n_worker)
#utils.load_net(args.verb_module, [model.verb_module])
#utils.load_net(args.role_module, [model.role_module])
model_name = 'train_full'
if not os.path.exists(args.output_dir):
os.mkdir(args.output_dir)
torch.manual_seed(1234)
if args.gpuid >= 0:
#print('GPU enabled')
model.cuda()
torch.cuda.manual_seed(1234)
torch.backends.cudnn.deterministic = True
optimizer = torch.optim.Adamax([
{'params': cnn_features, 'lr': 5e-5},
{'params': role_features}
], lr=1e-3)
#optimizer = torch.optim.Adam(model.parameters(), lr=lr, weight_decay=weight_decay)
#scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=lr_step, gamma=lr_gamma)
#gradient clipping, grad check
scheduler = torch.optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.9)
if args.evaluate:
top1, top5, val_loss = eval(model, dev_loader, encoder, args.gpuid, write_to_file = True)
top1_avg = top1.get_average_results()
top5_avg = top5.get_average_results()
avg_score = top1_avg["verb"] + top1_avg["value"] + top1_avg["value-all"] + top5_avg["verb"] + \
top5_avg["value"] + top5_avg["value-all"] + top5_avg["value*"] + top5_avg["value-all*"]
avg_score /= 8
print ('Dev average :{:.2f} {} {}'.format( avg_score*100,
utils.format_dict(top1_avg,'{:.2f}', '1-'),
utils.format_dict(top5_avg, '{:.2f}', '5-')))
#write results to csv file
role_dict = top1.role_dict
fail_val_all = top1.value_all_dict
pass_val_dict = top1.vall_all_correct
with open('role_pred_data.json', 'w') as fp:
json.dump(role_dict, fp, indent=4)
with open('fail_val_all.json', 'w') as fp:
json.dump(fail_val_all, fp, indent=4)
with open('pass_val_all.json', 'w') as fp:
json.dump(pass_val_dict, fp, indent=4)
print('Writing predictions to file completed !')
elif args.test:
top1, top5, val_loss = eval(model, test_loader, encoder, args.gpuid, write_to_file = True)
top1_avg = top1.get_average_results()
top5_avg = top5.get_average_results()
avg_score = top1_avg["verb"] + top1_avg["value"] + top1_avg["value-all"] + top5_avg["verb"] + \
top5_avg["value"] + top5_avg["value-all"] + top5_avg["value*"] + top5_avg["value-all*"]
avg_score /= 8
print ('Test average :{:.2f} {} {}'.format( avg_score*100,
utils.format_dict(top1_avg,'{:.2f}', '1-'),
utils.format_dict(top5_avg, '{:.2f}', '5-')))
else:
print('Model training started!')
train(model, train_loader, dev_loader, traindev_loader, optimizer, scheduler, n_epoch, args.output_dir, encoder, args.gpuid, clip_norm, lr_max, model_name, args) |
script.py | """Generic script exporter class for any kernel language"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import entrypoints
from .templateexporter import TemplateExporter
from traitlets import Dict, default
from .base import get_exporter
class ScriptExporter(TemplateExporter):
# Caches of already looked-up and instantiated exporters for delegation:
_exporters = Dict()
_lang_exporters = Dict()
@default('template_file')
def _template_file_default(self):
return 'script.tpl'
def _get_language_exporter(self, lang_name):
"""Find an exporter for the language name from notebook metadata.
Uses the nbconvert.exporters.script group of entry points.
Returns None if no exporter is found.
"""
if lang_name not in self._lang_exporters:
try:
Exporter = entrypoints.get_single(
'nbconvert.exporters.script', lang_name).load()
except entrypoints.NoSuchEntryPoint:
self._lang_exporters[lang_name] = None
else:
self._lang_exporters[lang_name] = Exporter(parent=self)
return self._lang_exporters[lang_name]
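# Editor's sketch (illustrative packaging snippet, not code from this module): a package can
# supply a language-specific exporter by registering it in the "nbconvert.exporters.script"
# entry-point group used above, e.g. in its setup.py (package and class names are assumptions):
#
#   entry_points={
#       "nbconvert.exporters.script": [
#           "fsharp = mypackage.exporters:FSharpScriptExporter",
#       ],
#   }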
def from_notebook_node(self, nb, resources=None, **kw):
langinfo = nb.metadata.get('language_info', {}) | # delegate to custom exporter, if specified
exporter_name = langinfo.get('nbconvert_exporter')
if exporter_name and exporter_name != 'script':
self.log.debug("Loading script exporter: %s", exporter_name)
if exporter_name not in self._exporters:
Exporter = get_exporter(exporter_name)
self._exporters[exporter_name] = Exporter(parent=self)
exporter = self._exporters[exporter_name]
return exporter.from_notebook_node(nb, resources, **kw)
# Look up a script exporter for this notebook's language
lang_name = langinfo.get('name')
if lang_name:
self.log.debug("Using script exporter for language: %s", lang_name)
exporter = self._get_language_exporter(lang_name)
if exporter is not None:
return exporter.from_notebook_node(nb, resources, **kw)
# Fall back to plain script export
self.file_extension = langinfo.get('file_extension', '.txt')
self.output_mimetype = langinfo.get('mimetype', 'text/plain')
return super(ScriptExporter, self).from_notebook_node(nb, resources, **kw) | |
limitprocessor_identifier_localsubnet_test.py | #!/usr/bin/env python3
"""
Test for local-subnet identifier
"""
import unittest
import netifaces
from base_test import PschedTestBase
from pscheduler.limitprocessor.identifier.localsubnet import *
DATA = {
}
class TestLimitprocessorIdentifierLocalSubnet(PschedTestBase):
|
if __name__ == '__main__':
unittest.main()
| """
Test the Identifier
"""
def test_data_is_valid(self):
"""Limit Processor / Identifier Local Subnet / Data Validation"""
self.assertEqual(data_is_valid(DATA), (True, "OK"))
self.assertEqual(data_is_valid({ "abc": 123 }),
(False, 'Data is not an object or not empty.'))
def test_identifier(self):
"""Limit Processor / Identifier Local Subnet / Identifier"""
test_ifaces = {
"lo0": {
netifaces.AF_INET: [
{'addr': '127.0.0.1', 'netmask': '255.0.0.0', 'peer': '127.0.0.1'}
],
netifaces.AF_INET6: [
{'addr': '::1', 'netmask': 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128', 'peer': '::1', 'flags': 0},
{'addr': 'fe80::1%lo0', 'netmask': 'ffff:ffff:ffff:ffff::/64', 'flags': 0}
]
}
}
ident = IdentifierLocalSubnet(DATA, test_ifaces=test_ifaces)
self.assertEqual(
ident.evaluate({ "requester": "127.0.0.5" }),
True)
self.assertEqual(
ident.evaluate({ "requester": "fe80::1" }),
True)
self.assertEqual(
ident.evaluate({ "requester": "192.0.2.9" }),
False)
self.assertEqual(
ident.evaluate({ "requester": "2001:db8::1" }),
False) |
benchmark.pb.go | // Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.25.0
// protoc v3.12.4
// source: isuxportal/services/contestant/benchmark.proto
package contestant
import (
proto "github.com/golang/protobuf/proto"
resources "github.com/isucon/isucon10-portal/proto.go/isuxportal/resources"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// This is a compile-time assertion that a sufficiently up-to-date version
// of the legacy proto package is being used.
const _ = proto.ProtoPackageIsVersion4
type ListBenchmarkJobsQuery struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Limit int64 `protobuf:"varint,1,opt,name=limit,proto3" json:"limit,omitempty"`
}
func (x *ListBenchmarkJobsQuery) Reset() {
*x = ListBenchmarkJobsQuery{}
if protoimpl.UnsafeEnabled {
mi := &file_isuxportal_services_contestant_benchmark_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ListBenchmarkJobsQuery) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ListBenchmarkJobsQuery) ProtoMessage() {}
func (x *ListBenchmarkJobsQuery) ProtoReflect() protoreflect.Message {
mi := &file_isuxportal_services_contestant_benchmark_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ListBenchmarkJobsQuery.ProtoReflect.Descriptor instead.
func (*ListBenchmarkJobsQuery) Descriptor() ([]byte, []int) {
return file_isuxportal_services_contestant_benchmark_proto_rawDescGZIP(), []int{0}
}
func (x *ListBenchmarkJobsQuery) GetLimit() int64 {
if x != nil {
return x.Limit
}
return 0
}
type ListBenchmarkJobsResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Jobs []*resources.BenchmarkJob `protobuf:"bytes,1,rep,name=jobs,proto3" json:"jobs,omitempty"`
}
func (x *ListBenchmarkJobsResponse) Reset() {
*x = ListBenchmarkJobsResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_isuxportal_services_contestant_benchmark_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ListBenchmarkJobsResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ListBenchmarkJobsResponse) ProtoMessage() {}
func (x *ListBenchmarkJobsResponse) ProtoReflect() protoreflect.Message {
mi := &file_isuxportal_services_contestant_benchmark_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ListBenchmarkJobsResponse.ProtoReflect.Descriptor instead.
func (*ListBenchmarkJobsResponse) Descriptor() ([]byte, []int) {
return file_isuxportal_services_contestant_benchmark_proto_rawDescGZIP(), []int{1}
}
func (x *ListBenchmarkJobsResponse) GetJobs() []*resources.BenchmarkJob {
if x != nil {
return x.Jobs
}
return nil
}
type EnqueueBenchmarkJobRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// target ContestantInstance id
TargetId int64 `protobuf:"varint,1,opt,name=target_id,json=targetId,proto3" json:"target_id,omitempty"`
}
func (x *EnqueueBenchmarkJobRequest) Reset() {
*x = EnqueueBenchmarkJobRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_isuxportal_services_contestant_benchmark_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *EnqueueBenchmarkJobRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*EnqueueBenchmarkJobRequest) ProtoMessage() {}
func (x *EnqueueBenchmarkJobRequest) ProtoReflect() protoreflect.Message {
mi := &file_isuxportal_services_contestant_benchmark_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use EnqueueBenchmarkJobRequest.ProtoReflect.Descriptor instead.
func (*EnqueueBenchmarkJobRequest) Descriptor() ([]byte, []int) {
return file_isuxportal_services_contestant_benchmark_proto_rawDescGZIP(), []int{2}
}
func (x *EnqueueBenchmarkJobRequest) GetTargetId() int64 {
if x != nil {
return x.TargetId
}
return 0
}
type EnqueueBenchmarkJobResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Job *resources.BenchmarkJob `protobuf:"bytes,1,opt,name=job,proto3" json:"job,omitempty"`
}
func (x *EnqueueBenchmarkJobResponse) Reset() {
*x = EnqueueBenchmarkJobResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_isuxportal_services_contestant_benchmark_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *EnqueueBenchmarkJobResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*EnqueueBenchmarkJobResponse) ProtoMessage() {}
func (x *EnqueueBenchmarkJobResponse) ProtoReflect() protoreflect.Message {
mi := &file_isuxportal_services_contestant_benchmark_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use EnqueueBenchmarkJobResponse.ProtoReflect.Descriptor instead.
func (*EnqueueBenchmarkJobResponse) Descriptor() ([]byte, []int) {
return file_isuxportal_services_contestant_benchmark_proto_rawDescGZIP(), []int{3}
}
func (x *EnqueueBenchmarkJobResponse) GetJob() *resources.BenchmarkJob {
if x != nil {
return x.Job
}
return nil
}
// Query parameter
type GetBenchmarkJobQuery struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Id int64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"`
}
func (x *GetBenchmarkJobQuery) Reset() {
*x = GetBenchmarkJobQuery{}
if protoimpl.UnsafeEnabled {
mi := &file_isuxportal_services_contestant_benchmark_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *GetBenchmarkJobQuery) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*GetBenchmarkJobQuery) ProtoMessage() {}
func (x *GetBenchmarkJobQuery) ProtoReflect() protoreflect.Message {
mi := &file_isuxportal_services_contestant_benchmark_proto_msgTypes[4]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use GetBenchmarkJobQuery.ProtoReflect.Descriptor instead.
func (*GetBenchmarkJobQuery) Descriptor() ([]byte, []int) {
return file_isuxportal_services_contestant_benchmark_proto_rawDescGZIP(), []int{4}
}
func (x *GetBenchmarkJobQuery) GetId() int64 {
if x != nil {
return x.Id
}
return 0
}
type GetBenchmarkJobResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Job *resources.BenchmarkJob `protobuf:"bytes,1,opt,name=job,proto3" json:"job,omitempty"`
}
func (x *GetBenchmarkJobResponse) Reset() {
*x = GetBenchmarkJobResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_isuxportal_services_contestant_benchmark_proto_msgTypes[5]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *GetBenchmarkJobResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*GetBenchmarkJobResponse) ProtoMessage() {}
func (x *GetBenchmarkJobResponse) ProtoReflect() protoreflect.Message {
mi := &file_isuxportal_services_contestant_benchmark_proto_msgTypes[5]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use GetBenchmarkJobResponse.ProtoReflect.Descriptor instead.
func (*GetBenchmarkJobResponse) Descriptor() ([]byte, []int) {
return file_isuxportal_services_contestant_benchmark_proto_rawDescGZIP(), []int{5}
}
func (x *GetBenchmarkJobResponse) GetJob() *resources.BenchmarkJob {
if x != nil {
return x.Job
}
return nil
}
var File_isuxportal_services_contestant_benchmark_proto protoreflect.FileDescriptor
var file_isuxportal_services_contestant_benchmark_proto_rawDesc = []byte{
0x0a, 0x2e, 0x69, 0x73, 0x75, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x2f, 0x73, 0x65, 0x72,
0x76, 0x69, 0x63, 0x65, 0x73, 0x2f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x73, 0x74, 0x61, 0x6e, 0x74,
0x2f, 0x62, 0x65, 0x6e, 0x63, 0x68, 0x6d, 0x61, 0x72, 0x6b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x12, 0x24, 0x69, 0x73, 0x75, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x2e, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x74,
0x65, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x1a, 0x28, 0x69, 0x73, 0x75, 0x78, 0x70, 0x6f, 0x72, 0x74,
0x61, 0x6c, 0x2f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2f, 0x62, 0x65, 0x6e,
0x63, 0x68, 0x6d, 0x61, 0x72, 0x6b, 0x5f, 0x6a, 0x6f, 0x62, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x22, 0x2e, 0x0a, 0x16, 0x4c, 0x69, 0x73, 0x74, 0x42, 0x65, 0x6e, 0x63, 0x68, 0x6d, 0x61, 0x72,
0x6b, 0x4a, 0x6f, 0x62, 0x73, 0x51, 0x75, 0x65, 0x72, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69,
0x6d, 0x69, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74,
0x22, 0x59, 0x0a, 0x19, 0x4c, 0x69, 0x73, 0x74, 0x42, 0x65, 0x6e, 0x63, 0x68, 0x6d, 0x61, 0x72,
0x6b, 0x4a, 0x6f, 0x62, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3c, 0x0a,
0x04, 0x6a, 0x6f, 0x62, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x69, 0x73,
0x75, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x72,
0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x42, 0x65, 0x6e, 0x63, 0x68, 0x6d, 0x61,
0x72, 0x6b, 0x4a, 0x6f, 0x62, 0x52, 0x04, 0x6a, 0x6f, 0x62, 0x73, 0x22, 0x39, 0x0a, 0x1a, 0x45,
0x6e, 0x71, 0x75, 0x65, 0x75, 0x65, 0x42, 0x65, 0x6e, 0x63, 0x68, 0x6d, 0x61, 0x72, 0x6b, 0x4a,
0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x61, 0x72,
0x67, 0x65, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x74, 0x61,
0x72, 0x67, 0x65, 0x74, 0x49, 0x64, 0x22, 0x59, 0x0a, 0x1b, 0x45, 0x6e, 0x71, 0x75, 0x65, 0x75,
0x65, 0x42, 0x65, 0x6e, 0x63, 0x68, 0x6d, 0x61, 0x72, 0x6b, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73,
0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3a, 0x0a, 0x03, 0x6a, 0x6f, 0x62, 0x18, 0x01, 0x20, 0x01,
0x28, 0x0b, 0x32, 0x28, 0x2e, 0x69, 0x73, 0x75, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e,
0x42, 0x65, 0x6e, 0x63, 0x68, 0x6d, 0x61, 0x72, 0x6b, 0x4a, 0x6f, 0x62, 0x52, 0x03, 0x6a, 0x6f,
0x62, 0x22, 0x26, 0x0a, 0x14, 0x47, 0x65, 0x74, 0x42, 0x65, 0x6e, 0x63, 0x68, 0x6d, 0x61, 0x72,
0x6b, 0x4a, 0x6f, 0x62, 0x51, 0x75, 0x65, 0x72, 0x79, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18,
0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x02, 0x69, 0x64, 0x22, 0x55, 0x0a, 0x17, 0x47, 0x65, 0x74,
0x42, 0x65, 0x6e, 0x63, 0x68, 0x6d, 0x61, 0x72, 0x6b, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70,
0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3a, 0x0a, 0x03, 0x6a, 0x6f, 0x62, 0x18, 0x01, 0x20, 0x01, 0x28,
0x0b, 0x32, 0x28, 0x2e, 0x69, 0x73, 0x75, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x2e, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x42,
0x65, 0x6e, 0x63, 0x68, 0x6d, 0x61, 0x72, 0x6b, 0x4a, 0x6f, 0x62, 0x52, 0x03, 0x6a, 0x6f, 0x62,
0x42, 0x4b, 0x5a, 0x49, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x69,
0x73, 0x75, 0x63, 0x6f, 0x6e, 0x2f, 0x69, 0x73, 0x75, 0x63, 0x6f, 0x6e, 0x31, 0x30, 0x2d, 0x70,
0x6f, 0x72, 0x74, 0x61, 0x6c, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x67, 0x6f, 0x2f, 0x69,
0x73, 0x75, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63,
0x65, 0x73, 0x2f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x62, 0x06, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_isuxportal_services_contestant_benchmark_proto_rawDescOnce sync.Once
file_isuxportal_services_contestant_benchmark_proto_rawDescData = file_isuxportal_services_contestant_benchmark_proto_rawDesc
)
func | () []byte {
file_isuxportal_services_contestant_benchmark_proto_rawDescOnce.Do(func() {
file_isuxportal_services_contestant_benchmark_proto_rawDescData = protoimpl.X.CompressGZIP(file_isuxportal_services_contestant_benchmark_proto_rawDescData)
})
return file_isuxportal_services_contestant_benchmark_proto_rawDescData
}
var file_isuxportal_services_contestant_benchmark_proto_msgTypes = make([]protoimpl.MessageInfo, 6)
var file_isuxportal_services_contestant_benchmark_proto_goTypes = []interface{}{
(*ListBenchmarkJobsQuery)(nil), // 0: isuxportal.proto.services.contestant.ListBenchmarkJobsQuery
(*ListBenchmarkJobsResponse)(nil), // 1: isuxportal.proto.services.contestant.ListBenchmarkJobsResponse
(*EnqueueBenchmarkJobRequest)(nil), // 2: isuxportal.proto.services.contestant.EnqueueBenchmarkJobRequest
(*EnqueueBenchmarkJobResponse)(nil), // 3: isuxportal.proto.services.contestant.EnqueueBenchmarkJobResponse
(*GetBenchmarkJobQuery)(nil), // 4: isuxportal.proto.services.contestant.GetBenchmarkJobQuery
(*GetBenchmarkJobResponse)(nil), // 5: isuxportal.proto.services.contestant.GetBenchmarkJobResponse
(*resources.BenchmarkJob)(nil), // 6: isuxportal.proto.resources.BenchmarkJob
}
var file_isuxportal_services_contestant_benchmark_proto_depIdxs = []int32{
6, // 0: isuxportal.proto.services.contestant.ListBenchmarkJobsResponse.jobs:type_name -> isuxportal.proto.resources.BenchmarkJob
6, // 1: isuxportal.proto.services.contestant.EnqueueBenchmarkJobResponse.job:type_name -> isuxportal.proto.resources.BenchmarkJob
6, // 2: isuxportal.proto.services.contestant.GetBenchmarkJobResponse.job:type_name -> isuxportal.proto.resources.BenchmarkJob
3, // [3:3] is the sub-list for method output_type
3, // [3:3] is the sub-list for method input_type
3, // [3:3] is the sub-list for extension type_name
3, // [3:3] is the sub-list for extension extendee
0, // [0:3] is the sub-list for field type_name
}
func init() { file_isuxportal_services_contestant_benchmark_proto_init() }
func file_isuxportal_services_contestant_benchmark_proto_init() {
if File_isuxportal_services_contestant_benchmark_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_isuxportal_services_contestant_benchmark_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListBenchmarkJobsQuery); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_isuxportal_services_contestant_benchmark_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListBenchmarkJobsResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_isuxportal_services_contestant_benchmark_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*EnqueueBenchmarkJobRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_isuxportal_services_contestant_benchmark_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*EnqueueBenchmarkJobResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_isuxportal_services_contestant_benchmark_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GetBenchmarkJobQuery); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_isuxportal_services_contestant_benchmark_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GetBenchmarkJobResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_isuxportal_services_contestant_benchmark_proto_rawDesc,
NumEnums: 0,
NumMessages: 6,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_isuxportal_services_contestant_benchmark_proto_goTypes,
DependencyIndexes: file_isuxportal_services_contestant_benchmark_proto_depIdxs,
MessageInfos: file_isuxportal_services_contestant_benchmark_proto_msgTypes,
}.Build()
File_isuxportal_services_contestant_benchmark_proto = out.File
file_isuxportal_services_contestant_benchmark_proto_rawDesc = nil
file_isuxportal_services_contestant_benchmark_proto_goTypes = nil
file_isuxportal_services_contestant_benchmark_proto_depIdxs = nil
}
| file_isuxportal_services_contestant_benchmark_proto_rawDescGZIP |
keys_panel_spec.js | import Vue from 'vue';
import DeployKeysStore from '~/deploy_keys/store';
import deployKeysPanel from '~/deploy_keys/components/keys_panel.vue';
describe('Deploy keys panel', () => {
const data = getJSONFixture('deploy_keys/keys.json');
let vm;
beforeEach(done => {
const DeployKeysPanelComponent = Vue.extend(deployKeysPanel);
const store = new DeployKeysStore();
store.keys = data;
vm = new DeployKeysPanelComponent({
propsData: {
title: 'test',
keys: data.enabled_keys,
showHelpBox: true,
store,
endpoint: 'https://test.host/dummy/endpoint',
},
}).$mount();
setTimeout(done);
});
it('renders list of keys', () => {
expect(vm.$el.querySelectorAll('.deploy-key').length).toBe(vm.keys.length);
});
it('renders table header', () => {
const tableHeader = vm.$el.querySelector('.table-row-header'); |
expect(tableHeader).toExist();
expect(tableHeader.textContent).toContain('Deploy key');
expect(tableHeader.textContent).toContain('Project usage');
expect(tableHeader.textContent).toContain('Created');
});
it('renders help box if keys are empty', done => {
vm.keys = [];
Vue.nextTick(() => {
expect(vm.$el.querySelector('.settings-message')).toBeDefined();
expect(vm.$el.querySelector('.settings-message').textContent.trim()).toBe(
'No deploy keys found. Create one with the form above.',
);
done();
});
});
it('renders no table header if keys are empty', done => {
vm.keys = [];
Vue.nextTick(() => {
expect(vm.$el.querySelector('.table-row-header')).not.toExist();
done();
});
});
}); | |
main.py | import datetime
import json
import multiprocessing
import os
import random
import re
import time
import discum
version = 'v0.01'
config_path = 'data/config.json'
logo = f'''
###### ### ### ## ####### ### ## ## ###
## ## ## ## ## ## ## ## ## ## ## ## ## ##
## ## ## ## ## ## ## ## ## ## ## ## ##
##### ## ## #### #### ## ## ## # ## ## ##
## ## ## ## ## ## ## ## ####### ## ##
## ## ## ## ## ## ## ## ## ### ### ## ##
#### ### ### ## ####### ### ## ## ###
~ Pokétwo Autocatcher {version}
'''
num_pokemon = 0
shiny = 0
legendary = 0
mythical = 0
poketwo_id = '716390085896962058'
def auto_config():
global user_token, channel_id
if not os.path.exists(config_path):
with open(config_path, "a") as file:
auth_token = input("Enter your Discord auth token: ")
channel_id = input("Enter the preferred Channel ID for spamming and catching: ")
file.write("{\n")
file.write(f' "user_token" : "{auth_token}",\n')
file.write(f' "channel_id" : "{channel_id}"\n')
file.write("}")
os.system('cls' if os.name=='nt' else 'clear')
with open(config_path,'r') as file:
info = json.loads(file.read())
user_token = info['user_token']
channel_id = info['channel_id']
with open('data/pokemon.txt', 'r', encoding='utf8') as file:
pokemon_list = file.read()
with open('data/legendary.txt','r') as file:
legendary_list = file.read()
with open('data/mythical.txt','r') as file:
mythical_list = file.read()
auto_config()
print(logo)
bot = discum.Client(token=user_token, log=False)
def solve(message):
hint = [message[i] for i in range(15, len(message) - 1) if message[i] != '\\']
hint_string = ''.join(hint)
return re.findall(
'^' + hint_string.replace('_', '.') + '$', pokemon_list, re.MULTILINE
)
def spam():
while True:
random_number = random.getrandbits(128)
bot.sendMessage(channel_id, random_number)
intervals = [2.0,2.1,2.2,2.3,2.4,2.5]
time.sleep(random.choice(intervals))
def start_spam():
new_process = multiprocessing.Process(target=spam)
new_process.start()
return new_process
def stop(process):
process.terminate()
def log(string):
now = datetime.datetime.now()
current_time = now.strftime('%H:%M:%S')
print(f'[{current_time}]', string)
@bot.gateway.command
def on_ready(resp):
if resp.event.ready_supplemental:
user = bot.gateway.session.user
log(f'Logged into account: {user["username"]}#{user["discriminator"]}')
@bot.gateway.command
def on_message(resp):
global spam_process
if resp.event.message:
m = resp.parsed.auto()
if m['channel_id'] == channel_id and m['author']['id'] == poketwo_id:
if m['embeds']:
embed_title = m['embeds'][0]['title']
if 'wild pokémon has appeared!' in embed_title:
stop(spam_process)
time.sleep(2)
bot.sendMessage(channel_id, '<@716390085896962058> h')
elif "Congratulations" in embed_title:
embed_content = m['embeds'][0]['description']
if 'now level' in embed_content:
stop(spam_process)
split = embed_content.split(' ')
a = embed_content.count(' ')
level = int(split[a].replace('!', '')) | spam_process = start_spam()
else:
content = m['content']
if 'The pokémon is ' in content:
if len(solve(content)) == 0:
log('Pokemon not found.')
else:
for i in solve(content):
stop(spam_process)
time.sleep(2)
bot.sendMessage(channel_id, f'<@716390085896962058> c {i}')
time.sleep(2)
spam_process = start_spam()
elif 'Congratulations' in content:
global shiny
global legendary
global num_pokemon
global mythical
num_pokemon += 1
split = content.split(' ')
pokemon = split[7].replace('!','')
if 'These colors seem unusual...' in content:
shiny += 1
log(f'A shiny Pokémon was caught! Pokémon: {pokemon}')
log(f'Shiny: {shiny} | Legendary: {legendary} | Mythical: {mythical}')
elif re.findall(
f'^{pokemon}$', legendary_list, re.MULTILINE
):
legendary += 1
log(f'A legendary Pokémon was caught! Pokémon: {pokemon}')
log(f'Shiny: {shiny} | Legendary: {legendary} | Mythical: {mythical}')
elif re.findall(f'^{pokemon}$', mythical_list, re.MULTILINE):
mythical += 1
log(f'A mythical Pokémon was caught! Pokémon: {pokemon}')
log(f'Shiny: {shiny} | Legendary: {legendary} | Mythical: {mythical}')
else:
print(f'Total Pokémon Caught: {num_pokemon}')
elif 'human' in content:
stop(spam_process)
log('Captcha Detected; Autocatcher Paused. Press enter to restart.')
input()
bot.sendMessage(channel_id, '<@716390085896962058> h')
if __name__ == '__main__':
print('\nEvent Log:')
spam_process = start_spam()
bot.gateway.run(auto_reconnect=True) | if level == 100:
# wait: will implement in next update
pass |
fetch-json.ts | import fetch from 'cross-fetch';
export async function fetchJson<Return>(
apiGateway: string,
endpoint: string,
{
method = 'GET',
body,
jwt,
asUser,
xml,
plaintext,
formData,
returnText,
headers: additionalHeaders = {},
raw,
}: {
method?: 'GET' | 'PUT' | 'POST' | 'PATCH' | 'DELETE' | 'OPTIONS' | 'HEAD';
body?: any;
jwt?: string;
asUser?: { userId?: number; siteId?: number; userName?: string };
xml?: boolean;
plaintext?: boolean;
returnText?: boolean;
formData?: boolean;
headers?: any;
raw?: boolean;
}
): Promise<
| { error: true; data: { error: string }; status: number; debugResponse?: any }
| { error: false; data: Return; status: number }
> {
const headers: any = {
Accept: 'application/json',
...additionalHeaders,
};
if (jwt) {
headers.Authorization = `Bearer ${jwt}`;
}
if (!formData) {
if (plaintext && body) {
headers['Content-Type'] = 'text/plain';
} else if (xml && body) {
headers['Content-Type'] = 'text/xml';
} else if (body) {
headers['Content-Type'] = 'application/json';
}
}
if (asUser) {
if (asUser.userId) {
headers['x-madoc-user-id'] = `${asUser.userId}`;
}
if (asUser.siteId) {
headers['x-madoc-site-id'] = `${asUser.siteId}`;
}
if (asUser.userName) {
headers['x-madoc-user-name'] = `${asUser.userName}`;
}
}
const resp = await fetch(`${apiGateway}${endpoint}`, {
headers,
method,
body: body ? (xml || plaintext || formData ? body : JSON.stringify(body)) : undefined,
credentials: 'omit',
});
if (resp.ok) {
try {
if (raw) {
return {
error: false,
status: resp.status,
data: resp as any,
};
}
if (returnText) {
return {
error: false,
status: resp.status,
data: (await resp.text()) as any,
};
}
return {
error: false,
status: resp.status,
data: await resp.json(),
};
} catch (err) {
if (resp.statusText === 'OK') {
return {
error: false,
status: resp.status,
data: undefined as any,
};
}
} |
try {
const errorData = await resp.json();
if (errorData.error) {
return {
error: true,
status: resp.status,
data: { error: errorData.error },
};
}
} catch (e) {
// fall through to the default unknown error.
}
return {
error: true,
status: resp.status,
data: { error: 'Unknown error' },
debugResponse: resp,
};
} | } |
pulumiTypes.go | // *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package v20191001preview
import (
"context"
"reflect"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
// Managed identity generic object.
type ManagedServiceIdentity struct {
// Type of the managed identity.
Type *string `pulumi:"type"`
// The list of user-assigned managed identities associated with the resource. Key is the Azure resource Id of the managed identity.
UserAssignedIdentities map[string]interface{} `pulumi:"userAssignedIdentities"`
}
// ManagedServiceIdentityInput is an input type that accepts ManagedServiceIdentityArgs and ManagedServiceIdentityOutput values.
// You can construct a concrete instance of `ManagedServiceIdentityInput` via:
//
// ManagedServiceIdentityArgs{...}
type ManagedServiceIdentityInput interface {
pulumi.Input
ToManagedServiceIdentityOutput() ManagedServiceIdentityOutput
ToManagedServiceIdentityOutputWithContext(context.Context) ManagedServiceIdentityOutput
}
// Managed identity generic object.
type ManagedServiceIdentityArgs struct {
// Type of the managed identity.
Type pulumi.StringPtrInput `pulumi:"type"`
// The list of user-assigned managed identities associated with the resource. Key is the Azure resource Id of the managed identity.
UserAssignedIdentities pulumi.MapInput `pulumi:"userAssignedIdentities"`
}
func (ManagedServiceIdentityArgs) ElementType() reflect.Type {
return reflect.TypeOf((*ManagedServiceIdentity)(nil)).Elem()
}
func (i ManagedServiceIdentityArgs) ToManagedServiceIdentityOutput() ManagedServiceIdentityOutput {
return i.ToManagedServiceIdentityOutputWithContext(context.Background())
}
func (i ManagedServiceIdentityArgs) ToManagedServiceIdentityOutputWithContext(ctx context.Context) ManagedServiceIdentityOutput {
return pulumi.ToOutputWithContext(ctx, i).(ManagedServiceIdentityOutput)
}
func (i ManagedServiceIdentityArgs) ToManagedServiceIdentityPtrOutput() ManagedServiceIdentityPtrOutput {
return i.ToManagedServiceIdentityPtrOutputWithContext(context.Background())
}
func (i ManagedServiceIdentityArgs) ToManagedServiceIdentityPtrOutputWithContext(ctx context.Context) ManagedServiceIdentityPtrOutput {
return pulumi.ToOutputWithContext(ctx, i).(ManagedServiceIdentityOutput).ToManagedServiceIdentityPtrOutputWithContext(ctx)
}
// ManagedServiceIdentityPtrInput is an input type that accepts ManagedServiceIdentityArgs, ManagedServiceIdentityPtr and ManagedServiceIdentityPtrOutput values.
// You can construct a concrete instance of `ManagedServiceIdentityPtrInput` via:
//
// ManagedServiceIdentityArgs{...}
//
// or:
//
// nil
type ManagedServiceIdentityPtrInput interface {
pulumi.Input
ToManagedServiceIdentityPtrOutput() ManagedServiceIdentityPtrOutput
ToManagedServiceIdentityPtrOutputWithContext(context.Context) ManagedServiceIdentityPtrOutput
}
type managedServiceIdentityPtrType ManagedServiceIdentityArgs
func ManagedServiceIdentityPtr(v *ManagedServiceIdentityArgs) ManagedServiceIdentityPtrInput {
return (*managedServiceIdentityPtrType)(v)
}
func (*managedServiceIdentityPtrType) ElementType() reflect.Type {
return reflect.TypeOf((**ManagedServiceIdentity)(nil)).Elem()
}
func (i *managedServiceIdentityPtrType) ToManagedServiceIdentityPtrOutput() ManagedServiceIdentityPtrOutput {
return i.ToManagedServiceIdentityPtrOutputWithContext(context.Background())
}
func (i *managedServiceIdentityPtrType) ToManagedServiceIdentityPtrOutputWithContext(ctx context.Context) ManagedServiceIdentityPtrOutput {
return pulumi.ToOutputWithContext(ctx, i).(ManagedServiceIdentityPtrOutput)
}
// Managed identity generic object.
type ManagedServiceIdentityOutput struct{ *pulumi.OutputState }
func (ManagedServiceIdentityOutput) ElementType() reflect.Type {
return reflect.TypeOf((*ManagedServiceIdentity)(nil)).Elem()
}
func (o ManagedServiceIdentityOutput) ToManagedServiceIdentityOutput() ManagedServiceIdentityOutput {
return o
}
func (o ManagedServiceIdentityOutput) ToManagedServiceIdentityOutputWithContext(ctx context.Context) ManagedServiceIdentityOutput {
return o
}
func (o ManagedServiceIdentityOutput) ToManagedServiceIdentityPtrOutput() ManagedServiceIdentityPtrOutput {
return o.ToManagedServiceIdentityPtrOutputWithContext(context.Background())
}
func (o ManagedServiceIdentityOutput) ToManagedServiceIdentityPtrOutputWithContext(ctx context.Context) ManagedServiceIdentityPtrOutput {
return o.ApplyT(func(v ManagedServiceIdentity) *ManagedServiceIdentity {
return &v
}).(ManagedServiceIdentityPtrOutput)
}
// Type of the managed identity.
func (o ManagedServiceIdentityOutput) Type() pulumi.StringPtrOutput {
return o.ApplyT(func(v ManagedServiceIdentity) *string { return v.Type }).(pulumi.StringPtrOutput)
}
// The list of user-assigned managed identities associated with the resource. Key is the Azure resource Id of the managed identity.
func (o ManagedServiceIdentityOutput) UserAssignedIdentities() pulumi.MapOutput {
return o.ApplyT(func(v ManagedServiceIdentity) map[string]interface{} { return v.UserAssignedIdentities }).(pulumi.MapOutput)
}
type ManagedServiceIdentityPtrOutput struct{ *pulumi.OutputState }
func (ManagedServiceIdentityPtrOutput) ElementType() reflect.Type {
return reflect.TypeOf((**ManagedServiceIdentity)(nil)).Elem()
}
func (o ManagedServiceIdentityPtrOutput) ToManagedServiceIdentityPtrOutput() ManagedServiceIdentityPtrOutput {
return o
}
func (o ManagedServiceIdentityPtrOutput) ToManagedServiceIdentityPtrOutputWithContext(ctx context.Context) ManagedServiceIdentityPtrOutput {
return o
}
func (o ManagedServiceIdentityPtrOutput) Elem() ManagedServiceIdentityOutput {
return o.ApplyT(func(v *ManagedServiceIdentity) ManagedServiceIdentity { return *v }).(ManagedServiceIdentityOutput)
}
// Type of the managed identity.
func (o ManagedServiceIdentityPtrOutput) Type() pulumi.StringPtrOutput {
return o.ApplyT(func(v *ManagedServiceIdentity) *string {
if v == nil {
return nil
}
return v.Type
}).(pulumi.StringPtrOutput)
}
// The list of user-assigned managed identities associated with the resource. Key is the Azure resource Id of the managed identity.
func (o ManagedServiceIdentityPtrOutput) UserAssignedIdentities() pulumi.MapOutput {
return o.ApplyT(func(v *ManagedServiceIdentity) map[string]interface{} {
if v == nil {
return nil
}
return v.UserAssignedIdentities
}).(pulumi.MapOutput)
}
// Managed identity generic object.
type ManagedServiceIdentityResponse struct {
// ID of the Azure Active Directory.
TenantId string `pulumi:"tenantId"`
// Type of the managed identity.
Type *string `pulumi:"type"`
// The list of user-assigned managed identities associated with the resource. Key is the Azure resource Id of the managed identity.
UserAssignedIdentities map[string]UserAssignedIdentityResponse `pulumi:"userAssignedIdentities"`
}
// ManagedServiceIdentityResponseInput is an input type that accepts ManagedServiceIdentityResponseArgs and ManagedServiceIdentityResponseOutput values.
// You can construct a concrete instance of `ManagedServiceIdentityResponseInput` via:
//
// ManagedServiceIdentityResponseArgs{...}
type ManagedServiceIdentityResponseInput interface {
pulumi.Input
ToManagedServiceIdentityResponseOutput() ManagedServiceIdentityResponseOutput
ToManagedServiceIdentityResponseOutputWithContext(context.Context) ManagedServiceIdentityResponseOutput
}
// Managed identity generic object.
type ManagedServiceIdentityResponseArgs struct {
// ID of the Azure Active Directory.
TenantId pulumi.StringInput `pulumi:"tenantId"`
// Type of the managed identity.
Type pulumi.StringPtrInput `pulumi:"type"`
// The list of user-assigned managed identities associated with the resource. Key is the Azure resource Id of the managed identity.
UserAssignedIdentities UserAssignedIdentityResponseMapInput `pulumi:"userAssignedIdentities"`
}
func (ManagedServiceIdentityResponseArgs) ElementType() reflect.Type {
return reflect.TypeOf((*ManagedServiceIdentityResponse)(nil)).Elem()
}
func (i ManagedServiceIdentityResponseArgs) ToManagedServiceIdentityResponseOutput() ManagedServiceIdentityResponseOutput {
return i.ToManagedServiceIdentityResponseOutputWithContext(context.Background())
}
func (i ManagedServiceIdentityResponseArgs) ToManagedServiceIdentityResponseOutputWithContext(ctx context.Context) ManagedServiceIdentityResponseOutput {
return pulumi.ToOutputWithContext(ctx, i).(ManagedServiceIdentityResponseOutput)
}
func (i ManagedServiceIdentityResponseArgs) ToManagedServiceIdentityResponsePtrOutput() ManagedServiceIdentityResponsePtrOutput {
return i.ToManagedServiceIdentityResponsePtrOutputWithContext(context.Background())
}
func (i ManagedServiceIdentityResponseArgs) ToManagedServiceIdentityResponsePtrOutputWithContext(ctx context.Context) ManagedServiceIdentityResponsePtrOutput {
return pulumi.ToOutputWithContext(ctx, i).(ManagedServiceIdentityResponseOutput).ToManagedServiceIdentityResponsePtrOutputWithContext(ctx)
}
// ManagedServiceIdentityResponsePtrInput is an input type that accepts ManagedServiceIdentityResponseArgs, ManagedServiceIdentityResponsePtr and ManagedServiceIdentityResponsePtrOutput values.
// You can construct a concrete instance of `ManagedServiceIdentityResponsePtrInput` via:
//
// ManagedServiceIdentityResponseArgs{...}
//
// or:
//
// nil
type ManagedServiceIdentityResponsePtrInput interface {
pulumi.Input
ToManagedServiceIdentityResponsePtrOutput() ManagedServiceIdentityResponsePtrOutput
ToManagedServiceIdentityResponsePtrOutputWithContext(context.Context) ManagedServiceIdentityResponsePtrOutput
}
type managedServiceIdentityResponsePtrType ManagedServiceIdentityResponseArgs
func | (v *ManagedServiceIdentityResponseArgs) ManagedServiceIdentityResponsePtrInput {
return (*managedServiceIdentityResponsePtrType)(v)
}
func (*managedServiceIdentityResponsePtrType) ElementType() reflect.Type {
return reflect.TypeOf((**ManagedServiceIdentityResponse)(nil)).Elem()
}
func (i *managedServiceIdentityResponsePtrType) ToManagedServiceIdentityResponsePtrOutput() ManagedServiceIdentityResponsePtrOutput {
return i.ToManagedServiceIdentityResponsePtrOutputWithContext(context.Background())
}
func (i *managedServiceIdentityResponsePtrType) ToManagedServiceIdentityResponsePtrOutputWithContext(ctx context.Context) ManagedServiceIdentityResponsePtrOutput {
return pulumi.ToOutputWithContext(ctx, i).(ManagedServiceIdentityResponsePtrOutput)
}
// Managed identity generic object.
type ManagedServiceIdentityResponseOutput struct{ *pulumi.OutputState }
func (ManagedServiceIdentityResponseOutput) ElementType() reflect.Type {
return reflect.TypeOf((*ManagedServiceIdentityResponse)(nil)).Elem()
}
func (o ManagedServiceIdentityResponseOutput) ToManagedServiceIdentityResponseOutput() ManagedServiceIdentityResponseOutput {
return o
}
func (o ManagedServiceIdentityResponseOutput) ToManagedServiceIdentityResponseOutputWithContext(ctx context.Context) ManagedServiceIdentityResponseOutput {
return o
}
func (o ManagedServiceIdentityResponseOutput) ToManagedServiceIdentityResponsePtrOutput() ManagedServiceIdentityResponsePtrOutput {
return o.ToManagedServiceIdentityResponsePtrOutputWithContext(context.Background())
}
func (o ManagedServiceIdentityResponseOutput) ToManagedServiceIdentityResponsePtrOutputWithContext(ctx context.Context) ManagedServiceIdentityResponsePtrOutput {
return o.ApplyT(func(v ManagedServiceIdentityResponse) *ManagedServiceIdentityResponse {
return &v
}).(ManagedServiceIdentityResponsePtrOutput)
}
// ID of the Azure Active Directory.
func (o ManagedServiceIdentityResponseOutput) TenantId() pulumi.StringOutput {
return o.ApplyT(func(v ManagedServiceIdentityResponse) string { return v.TenantId }).(pulumi.StringOutput)
}
// Type of the managed identity.
func (o ManagedServiceIdentityResponseOutput) Type() pulumi.StringPtrOutput {
return o.ApplyT(func(v ManagedServiceIdentityResponse) *string { return v.Type }).(pulumi.StringPtrOutput)
}
// The list of user-assigned managed identities associated with the resource. Key is the Azure resource Id of the managed identity.
func (o ManagedServiceIdentityResponseOutput) UserAssignedIdentities() UserAssignedIdentityResponseMapOutput {
return o.ApplyT(func(v ManagedServiceIdentityResponse) map[string]UserAssignedIdentityResponse {
return v.UserAssignedIdentities
}).(UserAssignedIdentityResponseMapOutput)
}
type ManagedServiceIdentityResponsePtrOutput struct{ *pulumi.OutputState }
func (ManagedServiceIdentityResponsePtrOutput) ElementType() reflect.Type {
return reflect.TypeOf((**ManagedServiceIdentityResponse)(nil)).Elem()
}
func (o ManagedServiceIdentityResponsePtrOutput) ToManagedServiceIdentityResponsePtrOutput() ManagedServiceIdentityResponsePtrOutput {
return o
}
func (o ManagedServiceIdentityResponsePtrOutput) ToManagedServiceIdentityResponsePtrOutputWithContext(ctx context.Context) ManagedServiceIdentityResponsePtrOutput {
return o
}
func (o ManagedServiceIdentityResponsePtrOutput) Elem() ManagedServiceIdentityResponseOutput {
return o.ApplyT(func(v *ManagedServiceIdentityResponse) ManagedServiceIdentityResponse { return *v }).(ManagedServiceIdentityResponseOutput)
}
// ID of the Azure Active Directory.
func (o ManagedServiceIdentityResponsePtrOutput) TenantId() pulumi.StringPtrOutput {
return o.ApplyT(func(v *ManagedServiceIdentityResponse) *string {
if v == nil {
return nil
}
return &v.TenantId
}).(pulumi.StringPtrOutput)
}
// Type of the managed identity.
func (o ManagedServiceIdentityResponsePtrOutput) Type() pulumi.StringPtrOutput {
return o.ApplyT(func(v *ManagedServiceIdentityResponse) *string {
if v == nil {
return nil
}
return v.Type
}).(pulumi.StringPtrOutput)
}
// The list of user-assigned managed identities associated with the resource. Key is the Azure resource Id of the managed identity.
func (o ManagedServiceIdentityResponsePtrOutput) UserAssignedIdentities() UserAssignedIdentityResponseMapOutput {
return o.ApplyT(func(v *ManagedServiceIdentityResponse) map[string]UserAssignedIdentityResponse {
if v == nil {
return nil
}
return v.UserAssignedIdentities
}).(UserAssignedIdentityResponseMapOutput)
}
// Metadata pertaining to creation and last modification of the resource.
type SystemDataResponse struct {
// The timestamp of resource creation (UTC).
CreatedAt *string `pulumi:"createdAt"`
// The identity that created the resource.
CreatedBy *string `pulumi:"createdBy"`
// The type of identity that created the resource.
CreatedByType *string `pulumi:"createdByType"`
// The timestamp of resource last modification (UTC)
LastModifiedAt *string `pulumi:"lastModifiedAt"`
// The identity that last modified the resource.
LastModifiedBy *string `pulumi:"lastModifiedBy"`
// The type of identity that last modified the resource.
LastModifiedByType *string `pulumi:"lastModifiedByType"`
}
// SystemDataResponseInput is an input type that accepts SystemDataResponseArgs and SystemDataResponseOutput values.
// You can construct a concrete instance of `SystemDataResponseInput` via:
//
// SystemDataResponseArgs{...}
type SystemDataResponseInput interface {
pulumi.Input
ToSystemDataResponseOutput() SystemDataResponseOutput
ToSystemDataResponseOutputWithContext(context.Context) SystemDataResponseOutput
}
// Metadata pertaining to creation and last modification of the resource.
type SystemDataResponseArgs struct {
// The timestamp of resource creation (UTC).
CreatedAt pulumi.StringPtrInput `pulumi:"createdAt"`
// The identity that created the resource.
CreatedBy pulumi.StringPtrInput `pulumi:"createdBy"`
// The type of identity that created the resource.
CreatedByType pulumi.StringPtrInput `pulumi:"createdByType"`
// The timestamp of resource last modification (UTC)
LastModifiedAt pulumi.StringPtrInput `pulumi:"lastModifiedAt"`
// The identity that last modified the resource.
LastModifiedBy pulumi.StringPtrInput `pulumi:"lastModifiedBy"`
// The type of identity that last modified the resource.
LastModifiedByType pulumi.StringPtrInput `pulumi:"lastModifiedByType"`
}
func (SystemDataResponseArgs) ElementType() reflect.Type {
return reflect.TypeOf((*SystemDataResponse)(nil)).Elem()
}
func (i SystemDataResponseArgs) ToSystemDataResponseOutput() SystemDataResponseOutput {
return i.ToSystemDataResponseOutputWithContext(context.Background())
}
func (i SystemDataResponseArgs) ToSystemDataResponseOutputWithContext(ctx context.Context) SystemDataResponseOutput {
return pulumi.ToOutputWithContext(ctx, i).(SystemDataResponseOutput)
}
func (i SystemDataResponseArgs) ToSystemDataResponsePtrOutput() SystemDataResponsePtrOutput {
return i.ToSystemDataResponsePtrOutputWithContext(context.Background())
}
func (i SystemDataResponseArgs) ToSystemDataResponsePtrOutputWithContext(ctx context.Context) SystemDataResponsePtrOutput {
return pulumi.ToOutputWithContext(ctx, i).(SystemDataResponseOutput).ToSystemDataResponsePtrOutputWithContext(ctx)
}
// SystemDataResponsePtrInput is an input type that accepts SystemDataResponseArgs, SystemDataResponsePtr and SystemDataResponsePtrOutput values.
// You can construct a concrete instance of `SystemDataResponsePtrInput` via:
//
// SystemDataResponseArgs{...}
//
// or:
//
// nil
type SystemDataResponsePtrInput interface {
pulumi.Input
ToSystemDataResponsePtrOutput() SystemDataResponsePtrOutput
ToSystemDataResponsePtrOutputWithContext(context.Context) SystemDataResponsePtrOutput
}
type systemDataResponsePtrType SystemDataResponseArgs
func SystemDataResponsePtr(v *SystemDataResponseArgs) SystemDataResponsePtrInput {
return (*systemDataResponsePtrType)(v)
}
func (*systemDataResponsePtrType) ElementType() reflect.Type {
return reflect.TypeOf((**SystemDataResponse)(nil)).Elem()
}
func (i *systemDataResponsePtrType) ToSystemDataResponsePtrOutput() SystemDataResponsePtrOutput {
return i.ToSystemDataResponsePtrOutputWithContext(context.Background())
}
func (i *systemDataResponsePtrType) ToSystemDataResponsePtrOutputWithContext(ctx context.Context) SystemDataResponsePtrOutput {
return pulumi.ToOutputWithContext(ctx, i).(SystemDataResponsePtrOutput)
}
// Metadata pertaining to creation and last modification of the resource.
type SystemDataResponseOutput struct{ *pulumi.OutputState }
func (SystemDataResponseOutput) ElementType() reflect.Type {
return reflect.TypeOf((*SystemDataResponse)(nil)).Elem()
}
func (o SystemDataResponseOutput) ToSystemDataResponseOutput() SystemDataResponseOutput {
return o
}
func (o SystemDataResponseOutput) ToSystemDataResponseOutputWithContext(ctx context.Context) SystemDataResponseOutput {
return o
}
func (o SystemDataResponseOutput) ToSystemDataResponsePtrOutput() SystemDataResponsePtrOutput {
return o.ToSystemDataResponsePtrOutputWithContext(context.Background())
}
func (o SystemDataResponseOutput) ToSystemDataResponsePtrOutputWithContext(ctx context.Context) SystemDataResponsePtrOutput {
return o.ApplyT(func(v SystemDataResponse) *SystemDataResponse {
return &v
}).(SystemDataResponsePtrOutput)
}
// The timestamp of resource creation (UTC).
func (o SystemDataResponseOutput) CreatedAt() pulumi.StringPtrOutput {
return o.ApplyT(func(v SystemDataResponse) *string { return v.CreatedAt }).(pulumi.StringPtrOutput)
}
// The identity that created the resource.
func (o SystemDataResponseOutput) CreatedBy() pulumi.StringPtrOutput {
return o.ApplyT(func(v SystemDataResponse) *string { return v.CreatedBy }).(pulumi.StringPtrOutput)
}
// The type of identity that created the resource.
func (o SystemDataResponseOutput) CreatedByType() pulumi.StringPtrOutput {
return o.ApplyT(func(v SystemDataResponse) *string { return v.CreatedByType }).(pulumi.StringPtrOutput)
}
// The timestamp of resource last modification (UTC)
func (o SystemDataResponseOutput) LastModifiedAt() pulumi.StringPtrOutput {
return o.ApplyT(func(v SystemDataResponse) *string { return v.LastModifiedAt }).(pulumi.StringPtrOutput)
}
// The identity that last modified the resource.
func (o SystemDataResponseOutput) LastModifiedBy() pulumi.StringPtrOutput {
return o.ApplyT(func(v SystemDataResponse) *string { return v.LastModifiedBy }).(pulumi.StringPtrOutput)
}
// The type of identity that last modified the resource.
func (o SystemDataResponseOutput) LastModifiedByType() pulumi.StringPtrOutput {
return o.ApplyT(func(v SystemDataResponse) *string { return v.LastModifiedByType }).(pulumi.StringPtrOutput)
}
type SystemDataResponsePtrOutput struct{ *pulumi.OutputState }
func (SystemDataResponsePtrOutput) ElementType() reflect.Type {
return reflect.TypeOf((**SystemDataResponse)(nil)).Elem()
}
func (o SystemDataResponsePtrOutput) ToSystemDataResponsePtrOutput() SystemDataResponsePtrOutput {
return o
}
func (o SystemDataResponsePtrOutput) ToSystemDataResponsePtrOutputWithContext(ctx context.Context) SystemDataResponsePtrOutput {
return o
}
func (o SystemDataResponsePtrOutput) Elem() SystemDataResponseOutput {
return o.ApplyT(func(v *SystemDataResponse) SystemDataResponse { return *v }).(SystemDataResponseOutput)
}
// The timestamp of resource creation (UTC).
func (o SystemDataResponsePtrOutput) CreatedAt() pulumi.StringPtrOutput {
return o.ApplyT(func(v *SystemDataResponse) *string {
if v == nil {
return nil
}
return v.CreatedAt
}).(pulumi.StringPtrOutput)
}
// The identity that created the resource.
func (o SystemDataResponsePtrOutput) CreatedBy() pulumi.StringPtrOutput {
return o.ApplyT(func(v *SystemDataResponse) *string {
if v == nil {
return nil
}
return v.CreatedBy
}).(pulumi.StringPtrOutput)
}
// The type of identity that created the resource.
func (o SystemDataResponsePtrOutput) CreatedByType() pulumi.StringPtrOutput {
return o.ApplyT(func(v *SystemDataResponse) *string {
if v == nil {
return nil
}
return v.CreatedByType
}).(pulumi.StringPtrOutput)
}
// The timestamp of resource last modification (UTC)
func (o SystemDataResponsePtrOutput) LastModifiedAt() pulumi.StringPtrOutput {
return o.ApplyT(func(v *SystemDataResponse) *string {
if v == nil {
return nil
}
return v.LastModifiedAt
}).(pulumi.StringPtrOutput)
}
// The identity that last modified the resource.
func (o SystemDataResponsePtrOutput) LastModifiedBy() pulumi.StringPtrOutput {
return o.ApplyT(func(v *SystemDataResponse) *string {
if v == nil {
return nil
}
return v.LastModifiedBy
}).(pulumi.StringPtrOutput)
}
// The type of identity that last modified the resource.
func (o SystemDataResponsePtrOutput) LastModifiedByType() pulumi.StringPtrOutput {
return o.ApplyT(func(v *SystemDataResponse) *string {
if v == nil {
return nil
}
return v.LastModifiedByType
}).(pulumi.StringPtrOutput)
}
// User-assigned managed identity.
type UserAssignedIdentityResponse struct {
// Client App Id associated with this identity.
ClientId string `pulumi:"clientId"`
// Azure Active Directory principal ID associated with this identity.
PrincipalId string `pulumi:"principalId"`
}
// UserAssignedIdentityResponseInput is an input type that accepts UserAssignedIdentityResponseArgs and UserAssignedIdentityResponseOutput values.
// You can construct a concrete instance of `UserAssignedIdentityResponseInput` via:
//
// UserAssignedIdentityResponseArgs{...}
type UserAssignedIdentityResponseInput interface {
pulumi.Input
ToUserAssignedIdentityResponseOutput() UserAssignedIdentityResponseOutput
ToUserAssignedIdentityResponseOutputWithContext(context.Context) UserAssignedIdentityResponseOutput
}
// User-assigned managed identity.
type UserAssignedIdentityResponseArgs struct {
// Client App Id associated with this identity.
ClientId pulumi.StringInput `pulumi:"clientId"`
// Azure Active Directory principal ID associated with this identity.
PrincipalId pulumi.StringInput `pulumi:"principalId"`
}
func (UserAssignedIdentityResponseArgs) ElementType() reflect.Type {
return reflect.TypeOf((*UserAssignedIdentityResponse)(nil)).Elem()
}
func (i UserAssignedIdentityResponseArgs) ToUserAssignedIdentityResponseOutput() UserAssignedIdentityResponseOutput {
return i.ToUserAssignedIdentityResponseOutputWithContext(context.Background())
}
func (i UserAssignedIdentityResponseArgs) ToUserAssignedIdentityResponseOutputWithContext(ctx context.Context) UserAssignedIdentityResponseOutput {
return pulumi.ToOutputWithContext(ctx, i).(UserAssignedIdentityResponseOutput)
}
// UserAssignedIdentityResponseMapInput is an input type that accepts UserAssignedIdentityResponseMap and UserAssignedIdentityResponseMapOutput values.
// You can construct a concrete instance of `UserAssignedIdentityResponseMapInput` via:
//
// UserAssignedIdentityResponseMap{ "key": UserAssignedIdentityResponseArgs{...} }
type UserAssignedIdentityResponseMapInput interface {
pulumi.Input
ToUserAssignedIdentityResponseMapOutput() UserAssignedIdentityResponseMapOutput
ToUserAssignedIdentityResponseMapOutputWithContext(context.Context) UserAssignedIdentityResponseMapOutput
}
type UserAssignedIdentityResponseMap map[string]UserAssignedIdentityResponseInput
func (UserAssignedIdentityResponseMap) ElementType() reflect.Type {
return reflect.TypeOf((*map[string]UserAssignedIdentityResponse)(nil)).Elem()
}
func (i UserAssignedIdentityResponseMap) ToUserAssignedIdentityResponseMapOutput() UserAssignedIdentityResponseMapOutput {
return i.ToUserAssignedIdentityResponseMapOutputWithContext(context.Background())
}
func (i UserAssignedIdentityResponseMap) ToUserAssignedIdentityResponseMapOutputWithContext(ctx context.Context) UserAssignedIdentityResponseMapOutput {
return pulumi.ToOutputWithContext(ctx, i).(UserAssignedIdentityResponseMapOutput)
}
// User-assigned managed identity.
type UserAssignedIdentityResponseOutput struct{ *pulumi.OutputState }
func (UserAssignedIdentityResponseOutput) ElementType() reflect.Type {
return reflect.TypeOf((*UserAssignedIdentityResponse)(nil)).Elem()
}
func (o UserAssignedIdentityResponseOutput) ToUserAssignedIdentityResponseOutput() UserAssignedIdentityResponseOutput {
return o
}
func (o UserAssignedIdentityResponseOutput) ToUserAssignedIdentityResponseOutputWithContext(ctx context.Context) UserAssignedIdentityResponseOutput {
return o
}
// Client App Id associated with this identity.
func (o UserAssignedIdentityResponseOutput) ClientId() pulumi.StringOutput {
return o.ApplyT(func(v UserAssignedIdentityResponse) string { return v.ClientId }).(pulumi.StringOutput)
}
// Azure Active Directory principal ID associated with this identity.
func (o UserAssignedIdentityResponseOutput) PrincipalId() pulumi.StringOutput {
return o.ApplyT(func(v UserAssignedIdentityResponse) string { return v.PrincipalId }).(pulumi.StringOutput)
}
type UserAssignedIdentityResponseMapOutput struct{ *pulumi.OutputState }
func (UserAssignedIdentityResponseMapOutput) ElementType() reflect.Type {
return reflect.TypeOf((*map[string]UserAssignedIdentityResponse)(nil)).Elem()
}
func (o UserAssignedIdentityResponseMapOutput) ToUserAssignedIdentityResponseMapOutput() UserAssignedIdentityResponseMapOutput {
return o
}
func (o UserAssignedIdentityResponseMapOutput) ToUserAssignedIdentityResponseMapOutputWithContext(ctx context.Context) UserAssignedIdentityResponseMapOutput {
return o
}
func (o UserAssignedIdentityResponseMapOutput) MapIndex(k pulumi.StringInput) UserAssignedIdentityResponseOutput {
return pulumi.All(o, k).ApplyT(func(vs []interface{}) UserAssignedIdentityResponse {
return vs[0].(map[string]UserAssignedIdentityResponse)[vs[1].(string)]
}).(UserAssignedIdentityResponseOutput)
}
func init() {
pulumi.RegisterOutputType(ManagedServiceIdentityOutput{})
pulumi.RegisterOutputType(ManagedServiceIdentityPtrOutput{})
pulumi.RegisterOutputType(ManagedServiceIdentityResponseOutput{})
pulumi.RegisterOutputType(ManagedServiceIdentityResponsePtrOutput{})
pulumi.RegisterOutputType(SystemDataResponseOutput{})
pulumi.RegisterOutputType(SystemDataResponsePtrOutput{})
pulumi.RegisterOutputType(UserAssignedIdentityResponseOutput{})
pulumi.RegisterOutputType(UserAssignedIdentityResponseMapOutput{})
}
| ManagedServiceIdentityResponsePtr |
config.py | import os
basedir = os.path.abspath(os.path.dirname(__file__))
# Keeping track of various configurations of the Flask app
class Config(object):
| ALLOWED_EXTENSIONS = set(['csv'])
UPLOAD_FOLDER = './files/uploads/'
DOWNLOAD_FOLDER = './files/downloads/'
SECRET_KEY = os.environ.get('SECRET_KEY') or \
'uUHQMFSPB9H7G4bGwzFLDetrIyb4M8tj'
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'app.db')
SQLALCHEMY_TRACK_MODIFICATIONS = False
REDIS_URL = os.environ.get('REDIS_URL') or 'redis://'
CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL') or \
'redis://localhost:6379/0'
CELERY_RESULT_BACKEND = os.environ.get('CELERY_RESULT_BACKEND') or \
'redis://localhost:6379/0' |
|
example.py | def add(a,b):
return a + b
def subtract(a,b):
return a - b
def | (a,b):
return a * b
| product |
nga.py | """
Official page for Nigeria COVID figures:
https://covid19.ncdc.gov.ng/
"""
import logging
import os
import re
from bs4 import BeautifulSoup
import requests
from .country_scraper import CountryScraper
logger = logging.getLogger(__name__)
class Nga(CountryScraper):
| def fetch(self):
url = 'https://covid19.ncdc.gov.ng/'
response = requests.get(url)
saved_file = self.save_to_raw_cache(response.text, 'html')
return saved_file
def extract(self, source_file):
scrape_date = self.runtimestamp
with open(source_file) as fh:
soup = BeautifulSoup(fh.read(), 'html.parser')
headers = [h.text for h in soup.table.thead.find_all('th')]
headers.extend(['date', 'scrape_date'])
data = []
tbody_rows = soup.table.tbody.find_all('tr')
for tr in tbody_rows:
cells = [
cell.text.strip().replace(',','')
for cell in tr.find_all('td')
]
cells.extend(['', scrape_date])
data.append(cells)
outfile = self.processed_filepath_from_raw(source_file, 'csv')
merged_data = [headers]
merged_data.extend(data)
self.write_csv(merged_data, outfile)
return outfile |