file_name (string, 3–137 chars) | prefix (string, 0–918k chars) | suffix (string, 0–962k chars) | middle (string, 0–812k chars) |
---|---|---|---|
objectdatatypes.go
|
package automation
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/autorest/validation"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// ObjectDataTypesClient is the automation Client
type ObjectDataTypesClient struct {
BaseClient
}
// NewObjectDataTypesClient creates an instance of the ObjectDataTypesClient client.
func NewObjectDataTypesClient(subscriptionID string) ObjectDataTypesClient {
return NewObjectDataTypesClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewObjectDataTypesClientWithBaseURI creates an instance of the ObjectDataTypesClient client using a custom endpoint.
// Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack).
func NewObjectDataTypesClientWithBaseURI(baseURI string, subscriptionID string) ObjectDataTypesClient {
return ObjectDataTypesClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// ListFieldsByModuleAndType retrieves a list of fields of a given type, identified by module name.
// Parameters:
// resourceGroupName - name of an Azure Resource group.
// automationAccountName - the name of the automation account.
// moduleName - the name of the module.
// typeName - the name of the type.
func (client ObjectDataTypesClient) ListFieldsByModuleAndType(ctx context.Context, resourceGroupName string, automationAccountName string, moduleName string, typeName string) (result TypeFieldListResult, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ObjectDataTypesClient.ListFieldsByModuleAndType")
defer func() {
sc := -1
if result.Response.Response != nil
|
tracing.EndSpan(ctx, sc, err)
}()
}
if err := validation.Validate([]validation.Validation{
{TargetValue: resourceGroupName,
Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil},
{Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._]+$`, Chain: nil}}}}); err != nil {
return result, validation.NewError("automation.ObjectDataTypesClient", "ListFieldsByModuleAndType", err.Error())
}
req, err := client.ListFieldsByModuleAndTypePreparer(ctx, resourceGroupName, automationAccountName, moduleName, typeName)
if err != nil {
err = autorest.NewErrorWithError(err, "automation.ObjectDataTypesClient", "ListFieldsByModuleAndType", nil, "Failure preparing request")
return
}
resp, err := client.ListFieldsByModuleAndTypeSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "automation.ObjectDataTypesClient", "ListFieldsByModuleAndType", resp, "Failure sending request")
return
}
result, err = client.ListFieldsByModuleAndTypeResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "automation.ObjectDataTypesClient", "ListFieldsByModuleAndType", resp, "Failure responding to request")
return
}
return
}
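// exampleListFieldsByModuleAndType is an illustrative sketch, not part of the generated client: it
// shows one plausible way to construct the client and call ListFieldsByModuleAndType. The
// subscription ID, resource group, automation account, module and type names are placeholders, the
// caller must supply a valid autorest.Authorizer, and it assumes the generated TypeFieldListResult
// exposes its items via Value.
func exampleListFieldsByModuleAndType(authorizer autorest.Authorizer) {
    client := NewObjectDataTypesClient("00000000-0000-0000-0000-000000000000")
    client.Authorizer = authorizer
    result, err := client.ListFieldsByModuleAndType(context.Background(), "myResourceGroup", "myAutomationAccount", "MyModule", "MyType")
    if err != nil {
        return
    }
    if result.Value != nil {
        // Each returned TypeField describes one field (name and type) of the requested object data type.
        for _, field := range *result.Value {
            _ = field
        }
    }
}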
// ListFieldsByModuleAndTypePreparer prepares the ListFieldsByModuleAndType request.
func (client ObjectDataTypesClient) ListFieldsByModuleAndTypePreparer(ctx context.Context, resourceGroupName string, automationAccountName string, moduleName string, typeName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"automationAccountName": autorest.Encode("path", automationAccountName),
"moduleName": autorest.Encode("path", moduleName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
"typeName": autorest.Encode("path", typeName),
}
const APIVersion = "2015-10-31"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/modules/{moduleName}/objectDataTypes/{typeName}/fields", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListFieldsByModuleAndTypeSender sends the ListFieldsByModuleAndType request. The method will close the
// http.Response Body if it receives an error.
func (client ObjectDataTypesClient) ListFieldsByModuleAndTypeSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// ListFieldsByModuleAndTypeResponder handles the response to the ListFieldsByModuleAndType request. The method always
// closes the http.Response Body.
func (client ObjectDataTypesClient) ListFieldsByModuleAndTypeResponder(resp *http.Response) (result TypeFieldListResult, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// ListFieldsByType retrieves a list of fields of a given type across all accessible modules.
// Parameters:
// resourceGroupName - name of an Azure Resource group.
// automationAccountName - the name of the automation account.
// typeName - the name of the type.
func (client ObjectDataTypesClient) ListFieldsByType(ctx context.Context, resourceGroupName string, automationAccountName string, typeName string) (result TypeFieldListResult, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ObjectDataTypesClient.ListFieldsByType")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
if err := validation.Validate([]validation.Validation{
{TargetValue: resourceGroupName,
Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil},
{Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._]+$`, Chain: nil}}}}); err != nil {
return result, validation.NewError("automation.ObjectDataTypesClient", "ListFieldsByType", err.Error())
}
req, err := client.ListFieldsByTypePreparer(ctx, resourceGroupName, automationAccountName, typeName)
if err != nil {
err = autorest.NewErrorWithError(err, "automation.ObjectDataTypesClient", "ListFieldsByType", nil, "Failure preparing request")
return
}
resp, err := client.ListFieldsByTypeSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "automation.ObjectDataTypesClient", "ListFieldsByType", resp, "Failure sending request")
return
}
result, err = client.ListFieldsByTypeResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "automation.ObjectDataTypesClient", "ListFieldsByType", resp, "Failure responding to request")
return
}
return
}
// ListFieldsByTypePreparer prepares the ListFieldsByType request.
func (client ObjectDataTypesClient) ListFieldsByTypePreparer(ctx context.Context, resourceGroupName string, automationAccountName string, typeName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"automationAccountName": autorest.Encode("path", automationAccountName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
"typeName": autorest.Encode("path", typeName),
}
const APIVersion = "2015-10-31"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/objectDataTypes/{typeName}/fields", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListFieldsByTypeSender sends the ListFieldsByType request. The method will close the
// http.Response Body if it receives an error.
func (client ObjectDataTypesClient) ListFieldsByTypeSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// ListFieldsByTypeResponder handles the response to the ListFieldsByType request. The method always
// closes the http.Response Body.
func (client ObjectDataTypesClient) ListFieldsByTypeResponder(resp *http.Response) (result TypeFieldListResult, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
|
{
sc = result.Response.Response.StatusCode
}
|
parser.py
|
""" Functions for parsing database and shell commands """
import shlex
from typing import Tuple, Callable, List, Dict, Generator, Optional
from .constants import SHELL_COMMAND_INDICATOR
from .exc import WrongNumberOfArgumentsError, UnknownCommandError
def parse(command_string: str, shell_command_lookup: Dict[str, dict]) -> Tuple[Callable, List]:
""" Parse a string of commands and arguments into a (command function, arguments) tuple
:param command_string: String of command + arguments
:param shell_command_lookup: Dict of shell commands to compare against command_string
"""
# It's assumed any string not starting with SHELL_COMMAND_INDICATOR
# is a command that should be sent directly to the db
if command_string.startswith(SHELL_COMMAND_INDICATOR):
return _parse_shell_command(
command_string=command_string,
shell_command_lookup=shell_command_lookup
)
else:
return _parse_db_command(
command_string=command_string,
shell_command_lookup=shell_command_lookup
)
def unquote(s: str) -> str:
""" Remove starting and ending quotes, only if quoted on both sides
:param s: String to strip leading/trailing quotes from
"""
# Don't strip if the string is too short
if len(s) < 2:
return s
# Get first and last character
start_char, end_char = s[0], s[-1]
# Only strip quotes if it starts and ends with the same quote type
if start_char == end_char and start_char in {'"', "'"}:
return s.strip(start_char)
else:
return s
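# For example (illustrative inputs): unquote('"hello world"') -> 'hello world',
# while unquote('"mismatched\'') is returned unchanged because the quote types differ.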
def tokenize(s: str, maximum_tokens: Optional[int] = None) -> Generator[str, None, None]:
""" Split a string containing whitespace-separated strings into tokens
Uses shlex for tokenization. If the number of tokens returned by shlex
would exceed maximum_tokens, the rest of the string is returned
as the last token.
:param s: String to parse
:param maximum_tokens: Maximum number of parsed arguments to return
"""
if isinstance(maximum_tokens, int) and maximum_tokens < 1:
raise ValueError("Maximum tokens must be None or greater than 0")
# This is tracked so we can return the rest of the string
# when we reach maximum_tokens, rather than wasting time
# tokenizing and recombining to form the final token
string_position = 0
token_number = 0
# Use shlex for parsing, splitting on whitespace only
lex = shlex.shlex(s)
lex.whitespace_split = True
for token in lex:
token_number += 1
# Return the rest of the string when we've
# reached the final token
if token_number == maximum_tokens:
rest_of_string = s[string_position:]
yield unquote(rest_of_string)
break
string_position += len(token) + 1
yield unquote(token)
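# Worked example (illustrative input): with maximum_tokens=2, everything after the first token
# is returned verbatim as the final token instead of being split further:
#   list(tokenize('set greeting "hello world"', maximum_tokens=2))
#   -> ['set', 'greeting "hello world"']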
def _parse_shell_command(command_string: str, shell_command_lookup: Dict[str, dict]) -> Tuple[Callable, List]:
""" Parse a shell (non-db) command into a (command function, arguments) tuple
:param command_string: String of command + arguments
:param shell_command_lookup: Dict of shell commands to compare against command_string
"""
# Break the command_string into a command string and arguments string
command, argument_string = _split_command_string(command_string)
# Get information about the command that was parsed out
command_details = _find_command_details(
command=command,
shell_command_lookup=shell_command_lookup
)
# Determine how many arguments we should expect
number_of_arguments = len(command_details["arguments"])
# Convert argument_string into a list of arguments
arguments = _parse_arguments(
argument_string=argument_string,
number_of_arguments=number_of_arguments,
verbose_final_argument=command_details["verbose_final_argument"]
)
# Raise an error if too many or too few arguments are provided
# (optional arguments currently aren't supported)
if len(arguments) != number_of_arguments:
message = f"Command '{command}' expects {number_of_arguments} arguments, got {len(arguments)}"
raise WrongNumberOfArgumentsError(message)
return command_details["func"], arguments
def _split_command_string(command_string: str) -> Tuple[str, str]:
|
def _find_command_details(command: str, shell_command_lookup: Dict[str, dict]):
""" Retrieve details about a given command
:param command: Command to search for
:param shell_command_lookup: Dict of shell commands to search in
"""
try:
return shell_command_lookup[command]
except KeyError:
raise UnknownCommandError(f"Command '{command}' not recognized")
def _parse_arguments(argument_string: str, number_of_arguments: int, verbose_final_argument: bool) -> List[str]:
""" Convert an argument_string into number_of_arguments arguments
:param argument_string: String containing all arguments to be parsed
:param number_of_arguments: How many arguments should be returned
:param verbose_final_argument: If True, ignore whitespace for generating final argument
"""
# Limit the number of arguments returned if we're doing a
# verbose final argument (the final argument will contain
# the rest of the string), else return as many as we can find
maximum_arguments = number_of_arguments if verbose_final_argument else None
# Get a generator that yields arguments
parser = tokenize(
s=argument_string,
maximum_tokens=maximum_arguments
)
return list(parser)
def _parse_db_command(command_string: str, shell_command_lookup: Dict[str, dict]) -> Tuple[Callable, List]:
""" Parse a database command into (command function, arguments) tuple
:param command_string: String of command + arguments
:param shell_command_lookup: Dict of shell commands for looking up execute function
"""
func = shell_command_lookup["execute"]["func"]
return func, [command_string]
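if __name__ == "__main__":
    # Illustrative sketch, not part of the module: a minimal, hypothetical command table showing
    # how parse() dispatches shell commands versus raw database commands. Real tables are built by
    # the shell from its registered commands, and the SQL text below is assumed not to start with
    # SHELL_COMMAND_INDICATOR.
    def _greet(name):
        print(f"hello, {name}")

    _lookup = {
        "greet": {"func": _greet, "arguments": ["name"], "verbose_final_argument": False},
        "execute": {"func": print, "arguments": ["statement"], "verbose_final_argument": True},
    }
    func, args = parse(SHELL_COMMAND_INDICATOR + "greet world", _lookup)
    func(*args)  # prints "hello, world"
    func, args = parse("SELECT * FROM users", _lookup)
    func(*args)  # the raw string is handed straight to the execute function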
|
""" Split a string into a (command, argument string) tuple
:param command_string: String of command + arguments
"""
# Remove the command character prefix
command_string_sans_prefix = command_string[len(SHELL_COMMAND_INDICATOR):]
try:
first_space = command_string_sans_prefix.index(" ")
except ValueError:
return command_string_sans_prefix, ""
# Get the command from the string
command = command_string_sans_prefix[:first_space]
# Get the arguments portion of the string
arguments = command_string_sans_prefix[first_space + 1:]
return command, arguments
|
lib.rs
|
// Copyright 2020 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
anyhow::{ensure, Error},
fidl_fuchsia_hardware_block as block,
fuchsia_async::{self as fasync, FifoReadable, FifoWritable},
fuchsia_zircon::{self as zx, HandleBased},
futures::channel::oneshot,
std::{
collections::HashMap,
convert::TryInto,
future::Future,
ops::DerefMut,
pin::Pin,
sync::{Arc, Mutex},
task::{Context, Poll, Waker},
},
};
pub use cache::Cache;
pub mod cache;
const BLOCK_VMOID_INVALID: u16 = 0;
const TEMP_VMO_SIZE: usize = 65536;
const BLOCKIO_READ: u32 = 1;
const BLOCKIO_WRITE: u32 = 2;
const BLOCKIO_FLUSH: u32 = 3;
const _BLOCKIO_TRIM: u32 = 4;
const BLOCKIO_CLOSE_VMO: u32 = 5;
#[repr(C)]
#[derive(Default)]
struct BlockFifoRequest {
op_code: u32,
request_id: u32,
group_id: u16,
vmoid: u16,
block_count: u32,
vmo_block: u64,
device_block: u64,
}
#[repr(C)]
#[derive(Default)]
struct BlockFifoResponse {
status: i32,
request_id: u32,
group_id: u16,
reserved1: u16,
count: u32,
reserved2: u64,
reserved3: u64,
}
unsafe impl fasync::FifoEntry for BlockFifoRequest {}
unsafe impl fasync::FifoEntry for BlockFifoResponse {}
pub enum BufferSlice<'a> {
VmoId { vmo_id: &'a VmoId, offset: u64, length: u64 },
Memory(&'a [u8]),
}
impl<'a> BufferSlice<'a> {
pub fn new_with_vmo_id(vmo_id: &'a VmoId, offset: u64, length: u64) -> Self {
BufferSlice::VmoId { vmo_id, offset, length }
}
}
impl<'a> From<&'a [u8]> for BufferSlice<'a> {
fn from(buf: &'a [u8]) -> Self {
BufferSlice::Memory(buf)
}
}
pub enum MutableBufferSlice<'a> {
VmoId { vmo_id: &'a VmoId, offset: u64, length: u64 },
Memory(&'a mut [u8]),
}
impl<'a> MutableBufferSlice<'a> {
pub fn new_with_vmo_id(vmo_id: &'a VmoId, offset: u64, length: u64) -> Self {
MutableBufferSlice::VmoId { vmo_id, offset, length }
}
}
impl<'a> From<&'a mut [u8]> for MutableBufferSlice<'a> {
fn from(buf: &'a mut [u8]) -> Self {
MutableBufferSlice::Memory(buf)
}
}
#[derive(Default)]
struct RequestState {
result: Option<zx::Status>,
waker: Option<Waker>,
}
#[derive(Default)]
struct FifoState {
// The fifo.
fifo: Option<fasync::Fifo<BlockFifoResponse, BlockFifoRequest>>,
// The next request ID to be used.
next_request_id: u32,
// A queue of messages to be sent on the fifo.
queue: std::collections::VecDeque<BlockFifoRequest>,
// Map from request ID to RequestState.
map: HashMap<u32, RequestState>,
// The waker for the FifoPoller.
poller_waker: Option<Waker>,
}
impl FifoState {
fn terminate(&mut self) {
self.fifo.take();
for (_, request_state) in self.map.iter_mut() {
request_state.result.get_or_insert(zx::Status::CANCELED);
if let Some(waker) = request_state.waker.take() {
waker.wake();
}
}
if let Some(waker) = self.poller_waker.take() {
waker.wake();
}
}
}
type FifoStateRef = Arc<Mutex<FifoState>>;
// A future used for fifo responses.
struct ResponseFuture {
request_id: u32,
fifo_state: FifoStateRef,
}
impl ResponseFuture {
fn new(fifo_state: FifoStateRef, request_id: u32) -> Self {
ResponseFuture { request_id, fifo_state }
}
}
impl Future for ResponseFuture {
type Output = Result<(), zx::Status>;
fn poll(self: Pin<&mut Self>, context: &mut Context<'_>) -> Poll<Self::Output> {
let mut state = self.fifo_state.lock().unwrap();
let request_state = state.map.get_mut(&self.request_id).unwrap();
if let Some(result) = request_state.result {
Poll::Ready(result.into())
} else {
request_state.waker.replace(context.waker().clone());
Poll::Pending
}
}
}
impl Drop for ResponseFuture {
fn drop(&mut self) {
self.fifo_state.lock().unwrap().map.remove(&self.request_id).unwrap();
}
}
/// Wraps a vmo-id. Will panic if you forget to detach.
pub struct VmoId(u16);
impl VmoId {
fn take(&mut self) -> VmoId {
let vmo_id = VmoId(self.0);
self.0 = BLOCK_VMOID_INVALID;
vmo_id
}
fn into_id(mut self) -> u16 {
let id = self.0;
self.0 = BLOCK_VMOID_INVALID;
id
}
fn id(&self) -> u16 {
self.0
}
}
impl Drop for VmoId {
fn drop(&mut self) {
assert_eq!(self.0, BLOCK_VMOID_INVALID, "Did you forget to detach?");
}
}
/// Represents a connection to a remote block device.
pub struct RemoteBlockDevice {
device: Mutex<block::BlockSynchronousProxy>,
block_size: u32,
block_count: u64,
fifo_state: FifoStateRef,
temp_vmo: futures::lock::Mutex<zx::Vmo>,
temp_vmo_id: VmoId,
}
impl RemoteBlockDevice {
/// Returns a connection to a remote block device via the given channel.
pub fn new(channel: zx::Channel) -> Result<Self, Error> {
let device = Self::from_channel(channel)?;
fasync::Task::spawn(FifoPoller { fifo_state: device.fifo_state.clone() }).detach();
Ok(device)
}
/// Returns a connection to a remote block device via the given channel, but spawns a separate
/// thread for polling the fifo, which makes it work in cases where no executor is configured for
/// the calling thread.
pub fn new_sync(channel: zx::Channel) -> Result<Self, Error> {
// The fifo needs to be instantiated from the thread that has the executor as that's where
// the fifo registers for notifications to be delivered.
let (sender, receiver) = oneshot::channel::<Result<Self, Error>>();
std::thread::spawn(move || {
let mut executor = fasync::Executor::new().expect("failed to create executor");
let maybe_device = RemoteBlockDevice::from_channel(channel);
let fifo_state = maybe_device.as_ref().ok().map(|device| device.fifo_state.clone());
let _ = sender.send(maybe_device);
if let Some(fifo_state) = fifo_state {
executor.run_singlethreaded(FifoPoller { fifo_state });
}
});
futures::executor::block_on(receiver).unwrap()
}
fn from_channel(channel: zx::Channel) -> Result<Self, Error> {
let mut block_device = block::BlockSynchronousProxy::new(channel);
let (status, maybe_info) = block_device.get_info(zx::Time::INFINITE)?;
let info = maybe_info.ok_or(zx::Status::from_raw(status))?;
let (status, maybe_fifo) = block_device.get_fifo(zx::Time::INFINITE)?;
let fifo = fasync::Fifo::from_fifo(maybe_fifo.ok_or(zx::Status::from_raw(status))?)?;
let fifo_state = Arc::new(Mutex::new(FifoState { fifo: Some(fifo), ..Default::default() }));
let temp_vmo = zx::Vmo::create(TEMP_VMO_SIZE as u64)?;
let (status, maybe_vmo_id) = block_device
.attach_vmo(temp_vmo.duplicate_handle(zx::Rights::SAME_RIGHTS)?, zx::Time::INFINITE)?;
let temp_vmo_id = VmoId(maybe_vmo_id.ok_or(zx::Status::from_raw(status))?.id);
let device = Self {
device: Mutex::new(block_device),
block_size: info.block_size,
block_count: info.block_count,
fifo_state,
temp_vmo: futures::lock::Mutex::new(temp_vmo),
temp_vmo_id,
};
Ok(device)
}
/// Wraps AttachVmo from fuchsia.hardware.block::Block.
pub fn attach_vmo(&self, vmo: &zx::Vmo) -> Result<VmoId, Error> {
let mut device = self.device.lock().unwrap();
let (status, maybe_vmo_id) = device
.attach_vmo(vmo.duplicate_handle(zx::Rights::SAME_RIGHTS)?, zx::Time::INFINITE)?;
Ok(VmoId(maybe_vmo_id.ok_or(zx::Status::from_raw(status))?.id))
}
/// Detaches the given vmo-id from the device.
pub async fn detach_vmo(&self, vmo_id: VmoId) -> Result<(), Error> {
self.send(BlockFifoRequest {
op_code: BLOCKIO_CLOSE_VMO,
vmoid: vmo_id.into_id(),
..Default::default()
})
.await
}
fn to_blocks(&self, bytes: u64) -> Result<u64, Error> {
ensure!(bytes % self.block_size as u64 == 0, "bad alignment");
Ok(bytes / self.block_size as u64)
}
// Sends the request and waits for the response.
async fn send(&self, mut request: BlockFifoRequest) -> Result<(), Error> {
let request_id;
{
let mut state = self.fifo_state.lock().unwrap();
if state.fifo.is_none() {
// Fifo has been closed.
return Err(zx::Status::CANCELED.into());
}
request_id = state.next_request_id;
state.next_request_id = state.next_request_id.overflowing_add(1).0;
assert!(
state.map.insert(request_id, RequestState::default()).is_none(),
"request id in use!"
);
request.request_id = request_id;
state.queue.push_back(request);
if let Some(waker) = state.poller_waker.take() {
waker.wake();
}
}
Ok(ResponseFuture::new(self.fifo_state.clone(), request_id).await?)
}
/// Reads from the device at |device_offset| into the given buffer slice.
pub async fn read_at(
&self,
buffer_slice: MutableBufferSlice<'_>,
device_offset: u64,
) -> Result<(), Error> {
match buffer_slice {
MutableBufferSlice::VmoId { vmo_id, offset, length } => {
self.send(BlockFifoRequest {
op_code: BLOCKIO_READ,
vmoid: vmo_id.id(),
block_count: self.to_blocks(length)?.try_into()?,
vmo_block: self.to_blocks(offset)?,
device_block: self.to_blocks(device_offset)?,
..Default::default()
})
.await?
}
MutableBufferSlice::Memory(mut slice) => {
let temp_vmo = self.temp_vmo.lock().await;
let mut device_block = self.to_blocks(device_offset)?;
loop {
let to_do = std::cmp::min(TEMP_VMO_SIZE, slice.len());
let block_count = self.to_blocks(to_do as u64)? as u32;
self.send(BlockFifoRequest {
op_code: BLOCKIO_READ,
vmoid: self.temp_vmo_id.id(),
block_count: block_count,
vmo_block: 0,
device_block: device_block,
..Default::default()
})
.await?;
temp_vmo.read(&mut slice[..to_do], 0)?;
if to_do == slice.len() {
break;
}
device_block += block_count as u64;
slice = &mut slice[to_do..];
}
}
}
Ok(())
}
/// Writes the data in |buffer_slice| to the device.
pub async fn write_at(
&self,
buffer_slice: BufferSlice<'_>,
device_offset: u64,
) -> Result<(), Error> {
match buffer_slice {
BufferSlice::VmoId { vmo_id, offset, length } => {
self.send(BlockFifoRequest {
op_code: BLOCKIO_WRITE,
vmoid: vmo_id.id(),
block_count: self.to_blocks(length)?.try_into()?,
vmo_block: self.to_blocks(offset)?,
device_block: self.to_blocks(device_offset)?,
..Default::default()
})
.await?;
}
BufferSlice::Memory(mut slice) => {
let temp_vmo = self.temp_vmo.lock().await;
let mut device_block = self.to_blocks(device_offset)?;
loop {
let to_do = std::cmp::min(TEMP_VMO_SIZE, slice.len());
let block_count = self.to_blocks(to_do as u64)? as u32;
temp_vmo.write(&slice[..to_do], 0)?;
self.send(BlockFifoRequest {
op_code: BLOCKIO_WRITE,
vmoid: self.temp_vmo_id.id(),
block_count: block_count,
vmo_block: 0,
device_block: device_block,
..Default::default()
})
.await?;
if to_do == slice.len() {
break;
}
device_block += block_count as u64;
slice = &slice[to_do..];
}
}
}
Ok(())
}
/// Flushes all data.
pub async fn flush(&self) -> Result<(), Error> {
self.send(BlockFifoRequest {
op_code: BLOCKIO_FLUSH,
vmoid: BLOCK_VMOID_INVALID,
..Default::default()
})
.await
|
}
pub fn block_size(&self) -> u32 {
self.block_size
}
pub fn block_count(&self) -> u64 {
self.block_count
}
/// Returns true if the remote fifo is still connected.
pub fn is_connected(&self) -> bool {
self.fifo_state.lock().unwrap().fifo.is_some()
}
}
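// Illustrative sketch, not part of this library: typical usage against a block device channel
// supplied by the caller. The 8192-byte transfer size is an assumption and must be a multiple of
// the real device's block size.
async fn _example_usage(channel: zx::Channel) -> Result<(), Error> {
    let device = RemoteBlockDevice::new(channel)?;
    let vmo = zx::Vmo::create(8192)?;
    let vmo_id = device.attach_vmo(&vmo)?;
    // Write the first 8192 bytes of the VMO to device offset 0, then read them back.
    device.write_at(BufferSlice::new_with_vmo_id(&vmo_id, 0, 8192), 0).await?;
    device.read_at(MutableBufferSlice::new_with_vmo_id(&vmo_id, 0, 8192), 0).await?;
    device.flush().await?;
    device.detach_vmo(vmo_id).await?;
    Ok(())
}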
impl Drop for RemoteBlockDevice {
fn drop(&mut self) {
// It's OK to leak the VMO id because the server will dump all VMOs when the fifo is torn
// down.
self.temp_vmo_id.take().into_id();
// Ignore errors here as there is not much we can do about it.
let _ = self.device.lock().unwrap().close_fifo(zx::Time::INFINITE);
}
}
// FifoPoller is a future responsible for sending and receiving from the fifo.
struct FifoPoller {
fifo_state: FifoStateRef,
}
impl Future for FifoPoller {
type Output = ();
fn poll(self: Pin<&mut Self>, context: &mut Context<'_>) -> Poll<Self::Output> {
let mut state_lock = self.fifo_state.lock().unwrap();
let state = state_lock.deref_mut(); // So that we can split the borrow.
let fifo = if let Some(fifo) = state.fifo.as_ref() {
fifo
} else {
return Poll::Ready(());
};
// Send requests.
loop {
let slice = state.queue.as_slices().0;
if slice.is_empty() {
break;
}
match fifo.write(context, slice) {
Poll::Ready(Ok(sent)) => {
state.queue.drain(0..sent);
}
Poll::Ready(Err(_)) => {
state.terminate();
return Poll::Ready(());
}
Poll::Pending => {
break;
}
}
}
// Receive responses.
while let Poll::Ready(result) = fifo.read(context) {
match result {
Ok(Some(response)) => {
let request_id = response.request_id;
// If the request isn't in the map, assume that it's a cancelled read.
if let Some(request_state) = state.map.get_mut(&request_id) {
request_state.result.replace(zx::Status::from_raw(response.status));
if let Some(waker) = request_state.waker.take() {
waker.wake();
}
}
}
_ => {
state.terminate();
return Poll::Ready(());
}
}
}
state.poller_waker = Some(context.waker().clone());
Poll::Pending
}
}
#[cfg(test)]
mod tests {
use {
super::{
BlockFifoRequest, BlockFifoResponse, BufferSlice, MutableBufferSlice, RemoteBlockDevice,
},
fidl_fuchsia_hardware_block::{self as block, BlockRequest},
fuchsia_async::{self as fasync, FifoReadable, FifoWritable},
fuchsia_zircon as zx,
futures::{
future::{AbortHandle, Abortable, TryFutureExt},
join,
stream::{futures_unordered::FuturesUnordered, StreamExt},
},
ramdevice_client::RamdiskClient,
};
const RAMDISK_BLOCK_SIZE: u64 = 1024;
const RAMDISK_BLOCK_COUNT: u64 = 1024;
pub fn make_ramdisk() -> (RamdiskClient, RemoteBlockDevice) {
isolated_driver_manager::launch_isolated_driver_manager()
.expect("launch_isolated_driver_manager failed");
ramdevice_client::wait_for_device("/dev/misc/ramctl", std::time::Duration::from_secs(10))
.expect("ramctl did not appear");
let ramdisk = RamdiskClient::create(RAMDISK_BLOCK_SIZE, RAMDISK_BLOCK_COUNT)
.expect("RamdiskClient::create failed");
let remote_block_device =
RemoteBlockDevice::new(ramdisk.open().expect("ramdisk.open failed"))
.expect("RemoteBlockDevice::new failed");
assert_eq!(remote_block_device.block_size, 1024);
(ramdisk, remote_block_device)
}
#[fasync::run_singlethreaded(test)]
async fn test_against_ram_disk() {
let (_ramdisk, remote_block_device) = make_ramdisk();
let stats_before = remote_block_device
.device
.lock()
.unwrap()
.get_stats(false, zx::Time::INFINITE)
.expect("get_stats failed");
assert_eq!(stats_before.0, zx::Status::OK.into_raw());
let stats_before = stats_before.1.expect("Processing get_stats result failed");
let vmo = zx::Vmo::create(131072).expect("Vmo::create failed");
vmo.write(b"hello", 5).expect("vmo.write failed");
let vmo_id = remote_block_device.attach_vmo(&vmo).expect("attach_vmo failed");
remote_block_device
.write_at(BufferSlice::new_with_vmo_id(&vmo_id, 0, 1024), 0)
.await
.expect("write_at failed");
remote_block_device
.read_at(MutableBufferSlice::new_with_vmo_id(&vmo_id, 1024, 2048), 0)
.await
.expect("read_at failed");
let mut buf: [u8; 5] = Default::default();
vmo.read(&mut buf, 1029).expect("vmo.read failed");
assert_eq!(&buf, b"hello");
remote_block_device.detach_vmo(vmo_id).await.expect("detach_vmo failed");
// check that the stats are what we expect them to be
let stats_after = remote_block_device
.device
.lock()
.unwrap()
.get_stats(false, zx::Time::INFINITE)
.expect("get_stats failed");
assert_eq!(stats_after.0, zx::Status::OK.into_raw());
let stats_after = stats_after.1.expect("Processing get_stats result failed");
// write stats
assert_eq!(
stats_before.write.success.total_calls + 1,
stats_after.write.success.total_calls
);
assert_eq!(
stats_before.write.success.bytes_transferred + 1024,
stats_after.write.success.bytes_transferred
);
assert_eq!(stats_before.write.failure.total_calls, stats_after.write.failure.total_calls);
// read stats
assert_eq!(stats_before.read.success.total_calls + 1, stats_after.read.success.total_calls);
assert_eq!(
stats_before.read.success.bytes_transferred + 2048,
stats_after.read.success.bytes_transferred
);
assert_eq!(stats_before.read.failure.total_calls, stats_after.read.failure.total_calls);
}
#[fasync::run_singlethreaded(test)]
async fn test_against_ram_disk_with_flush() {
let (_ramdisk, remote_block_device) = make_ramdisk();
let stats_before = remote_block_device
.device
.lock()
.unwrap()
.get_stats(false, zx::Time::INFINITE)
.expect("get_stats failed");
assert_eq!(stats_before.0, zx::Status::OK.into_raw());
let stats_before = stats_before.1.expect("Processing get_stats result failed");
let vmo = zx::Vmo::create(131072).expect("Vmo::create failed");
vmo.write(b"hello", 5).expect("vmo.write failed");
let vmo_id = remote_block_device.attach_vmo(&vmo).expect("attach_vmo failed");
remote_block_device
.write_at(BufferSlice::new_with_vmo_id(&vmo_id, 0, 1024), 0)
.await
.expect("write_at failed");
remote_block_device.flush().await.expect("flush failed");
remote_block_device
.read_at(MutableBufferSlice::new_with_vmo_id(&vmo_id, 1024, 2048), 0)
.await
.expect("read_at failed");
let mut buf: [u8; 5] = Default::default();
vmo.read(&mut buf, 1029).expect("vmo.read failed");
assert_eq!(&buf, b"hello");
remote_block_device.detach_vmo(vmo_id).await.expect("detach_vmo failed");
// check that the stats are what we expect them to be
let stats_after = remote_block_device
.device
.lock()
.unwrap()
.get_stats(false, zx::Time::INFINITE)
.expect("get_stats failed");
assert_eq!(stats_after.0, zx::Status::OK.into_raw());
let stats_after = stats_after.1.expect("Processing get_stats result failed");
// write stats
assert_eq!(
stats_before.write.success.total_calls + 1,
stats_after.write.success.total_calls
);
assert_eq!(
stats_before.write.success.bytes_transferred + 1024,
stats_after.write.success.bytes_transferred
);
assert_eq!(stats_before.write.failure.total_calls, stats_after.write.failure.total_calls);
// flush stats
assert_eq!(
stats_before.flush.success.total_calls + 1,
stats_after.flush.success.total_calls
);
assert_eq!(stats_before.flush.failure.total_calls, stats_after.flush.failure.total_calls);
// read stats
assert_eq!(stats_before.read.success.total_calls + 1, stats_after.read.success.total_calls);
assert_eq!(
stats_before.read.success.bytes_transferred + 2048,
stats_after.read.success.bytes_transferred
);
assert_eq!(stats_before.read.failure.total_calls, stats_after.read.failure.total_calls);
}
#[fasync::run_singlethreaded(test)]
async fn test_alignment() {
let (_ramdisk, remote_block_device) = make_ramdisk();
let vmo = zx::Vmo::create(131072).expect("Vmo::create failed");
let vmo_id = remote_block_device.attach_vmo(&vmo).expect("attach_vmo failed");
remote_block_device
.write_at(BufferSlice::new_with_vmo_id(&vmo_id, 0, 1024), 1)
.await
.expect_err("expected failure due to bad alignment");
remote_block_device.detach_vmo(vmo_id).await.expect("detach_vmo failed");
}
#[fasync::run_singlethreaded(test)]
async fn test_parallel_io() {
let (_ramdisk, remote_block_device) = make_ramdisk();
let vmo = zx::Vmo::create(131072).expect("Vmo::create failed");
let vmo_id = remote_block_device.attach_vmo(&vmo).expect("attach_vmo failed");
let mut reads = Vec::new();
for _ in 0..1024 {
reads.push(
remote_block_device
.read_at(MutableBufferSlice::new_with_vmo_id(&vmo_id, 0, 1024), 0)
.inspect_err(|e| panic!("read should have succeeded: {}", e)),
);
}
futures::future::join_all(reads).await;
remote_block_device.detach_vmo(vmo_id).await.expect("detach_vmo failed");
}
#[fasync::run_singlethreaded(test)]
async fn test_closed_device() {
let (ramdisk, remote_block_device) = make_ramdisk();
let vmo = zx::Vmo::create(131072).expect("Vmo::create failed");
let vmo_id = remote_block_device.attach_vmo(&vmo).expect("attach_vmo failed");
let mut reads = Vec::new();
for _ in 0..1024 {
reads.push(
remote_block_device
.read_at(MutableBufferSlice::new_with_vmo_id(&vmo_id, 0, 1024), 0),
);
}
assert!(remote_block_device.is_connected());
let _ = futures::join!(futures::future::join_all(reads), async {
ramdisk.destroy().expect("ramdisk.destroy failed")
});
// Destroying the ramdisk is asynchronous. Keep issuing reads until they start failing.
while remote_block_device
.read_at(MutableBufferSlice::new_with_vmo_id(&vmo_id, 0, 1024), 0)
.await
.is_ok()
{}
// Sometimes the FIFO will start rejecting requests before the FIFO is actually closed, so we
// can get false positives from is_connected.
while remote_block_device.is_connected() {
// Sleep for a bit to minimise lock contention.
fasync::Timer::new(fasync::Time::after(zx::Duration::from_millis(500))).await;
}
// But once is_connected goes negative, it should stay negative.
assert_eq!(remote_block_device.is_connected(), false);
let _ = remote_block_device.detach_vmo(vmo_id).await;
}
#[fasync::run_singlethreaded(test)]
async fn test_cancelled_reads() {
let (_ramdisk, remote_block_device) = make_ramdisk();
let vmo = zx::Vmo::create(131072).expect("Vmo::create failed");
let vmo_id = remote_block_device.attach_vmo(&vmo).expect("attach_vmo failed");
{
let mut reads = FuturesUnordered::new();
for _ in 0..1024 {
reads.push(
remote_block_device
.read_at(MutableBufferSlice::new_with_vmo_id(&vmo_id, 0, 1024), 0),
);
}
// Read the first 500 results and then dump the rest.
for _ in 0..500 {
reads.next().await;
}
}
remote_block_device.detach_vmo(vmo_id).await.expect("detach_vmo failed");
}
#[fasync::run_singlethreaded(test)]
async fn test_parallel_large_read_and_write_with_memory_succeeds() {
let (_ramdisk, remote_block_device) = make_ramdisk();
let remote_block_device_ref = &remote_block_device;
let test_one = |offset, len, fill| async move {
let buf = vec![fill; len];
remote_block_device_ref
.write_at(buf[..].into(), offset)
.await
.expect("write_at failed");
// Read back an extra block either side.
let mut read_buf = vec![0u8; len + 2 * RAMDISK_BLOCK_SIZE as usize];
remote_block_device_ref
.read_at(read_buf.as_mut_slice().into(), offset - RAMDISK_BLOCK_SIZE)
.await
.expect("read_at failed");
assert_eq!(
&read_buf[0..RAMDISK_BLOCK_SIZE as usize],
&[0; RAMDISK_BLOCK_SIZE as usize][..]
);
assert_eq!(
&read_buf[RAMDISK_BLOCK_SIZE as usize..RAMDISK_BLOCK_SIZE as usize + len],
&buf[..]
);
assert_eq!(
&read_buf[RAMDISK_BLOCK_SIZE as usize + len..],
&[0; RAMDISK_BLOCK_SIZE as usize][..]
);
};
const WRITE_LEN: usize = super::TEMP_VMO_SIZE * 3 + RAMDISK_BLOCK_SIZE as usize;
join!(
test_one(RAMDISK_BLOCK_SIZE, WRITE_LEN, 0xa3u8),
test_one(2 * RAMDISK_BLOCK_SIZE + WRITE_LEN as u64, WRITE_LEN, 0x7fu8)
);
}
// Implements a dummy server which test cases can use to verify whether channel messages and
// fifo operations are being received, via the channel_handler and fifo_handler callbacks
// respectively.
struct FakeBlockServer<'a> {
server_channel: Option<zx::Channel>,
channel_handler: Box<dyn Fn(&BlockRequest) -> bool + 'a>,
fifo_handler: Box<dyn Fn(BlockFifoRequest) -> BlockFifoResponse + 'a>,
}
impl<'a> FakeBlockServer<'a> {
// Creates a new FakeBlockServer given a channel to listen on.
//
// 'channel_handler' and 'fifo_handler' closures allow customizing how the server handles
// requests received from the channel or the fifo, respectively.
//
// 'channel_handler' receives a message before it is handled by the default implementation
// and can return 'true' to indicate all processing is done and no further processing of
// that message is required
//
// 'fifo_handler' takes as input a BlockFifoRequest and produces a response which the
// FakeBlockServer will send over the fifo.
fn new(
server_channel: zx::Channel,
channel_handler: impl Fn(&BlockRequest) -> bool + 'a,
fifo_handler: impl Fn(BlockFifoRequest) -> BlockFifoResponse + 'a,
) -> FakeBlockServer<'a> {
FakeBlockServer {
server_channel: Some(server_channel),
channel_handler: Box::new(channel_handler),
fifo_handler: Box::new(fifo_handler),
}
}
// Runs the server
async fn run(&mut self) {
let server = fidl::endpoints::ServerEnd::<block::BlockMarker>::new(
self.server_channel.take().unwrap(),
);
// Set up a mock server.
let (server_fifo, client_fifo) =
zx::Fifo::create(16, std::mem::size_of::<BlockFifoRequest>())
.expect("Fifo::create failed");
let maybe_server_fifo = std::sync::Mutex::new(Some(client_fifo));
let (fifo_future_abort, fifo_future_abort_registration) = AbortHandle::new_pair();
let fifo_future = Abortable::new(
async {
let fifo = fasync::Fifo::from_fifo(server_fifo).expect("from_fifo failed");
while let Some(request) = fifo.read_entry().await.expect("read_entry failed") {
let response = self.fifo_handler.as_ref()(request);
fifo.write_entries(std::slice::from_ref(&response))
.await
.expect("write_entries failed");
}
},
fifo_future_abort_registration,
);
let channel_future = async {
server
.into_stream()
.expect("into_stream failed")
.for_each(|request| async {
let request = request.expect("unexpected fidl error");
// Give a chance for the test to register and potentially handle the event
if self.channel_handler.as_ref()(&request) {
return;
}
match request {
BlockRequest::GetInfo { responder } => {
let mut block_info = block::BlockInfo {
block_count: 1024,
block_size: 512,
max_transfer_size: 1024 * 1024,
flags: 0,
reserved: 0,
};
responder
.send(zx::sys::ZX_OK, Some(&mut block_info))
.expect("send failed");
}
BlockRequest::GetFifo { responder } => {
responder
.send(zx::sys::ZX_OK, maybe_server_fifo.lock().unwrap().take())
.expect("send failed");
}
BlockRequest::AttachVmo { vmo: _, responder } => {
let mut vmo_id = block::VmoId { id: 1 };
responder
.send(zx::sys::ZX_OK, Some(&mut vmo_id))
.expect("send failed");
}
BlockRequest::CloseFifo { responder } => {
fifo_future_abort.abort();
responder.send(zx::sys::ZX_OK).expect("send failed");
}
_ => panic!("Unexpected message"),
}
})
.await;
};
let _result = join!(fifo_future, channel_future);
// _result can be Err(Aborted) since the CloseFifo handler calls abort(), but that's expected.
}
}
#[fasync::run_singlethreaded(test)]
async fn test_block_fifo_close_is_called() {
let close_called = std::sync::Mutex::new(false);
let (client, server) = zx::Channel::create().expect("Channel::create failed");
// Have to spawn this on a different thread because RemoteBlockDevice uses a synchronous
// client and we are using a single threaded executor.
std::thread::spawn(move || {
let _remote_block_device =
RemoteBlockDevice::new_sync(client).expect("RemoteBlockDevice::new_sync failed");
// The drop here should cause CloseFifo to be sent.
});
let channel_handler = |request: &BlockRequest| -> bool {
if let BlockRequest::CloseFifo { .. } = request {
*close_called.lock().unwrap() = true;
}
false
};
let mut fake_server = FakeBlockServer::new(server, channel_handler, |_| unreachable!());
fake_server.run().await;
// After the server has finished running, we can check to see that close was called.
assert!(*close_called.lock().unwrap());
}
#[fasync::run_singlethreaded(test)]
async fn test_block_flush_is_called() {
let flush_called = std::sync::Mutex::new(false);
let (client, server) = zx::Channel::create().expect("Channel::create failed");
// Have to spawn this on a different thread because RemoteBlockDevice uses a synchronous
// client and we are using a single threaded executor.
std::thread::spawn(move || {
let remote_block_device =
RemoteBlockDevice::new_sync(client).expect("RemoteBlockDevice::new_sync failed");
futures::executor::block_on(remote_block_device.flush())
.expect("RemoteBlockDevice::flush failed");
});
let fifo_handler = |request: BlockFifoRequest| -> BlockFifoResponse {
*flush_called.lock().unwrap() = true;
assert_eq!(request.op_code, super::BLOCKIO_FLUSH);
BlockFifoResponse {
status: zx::Status::OK.into_raw(),
request_id: request.request_id,
..Default::default()
}
};
let mut fake_server = FakeBlockServer::new(server, |_| false, fifo_handler);
fake_server.run().await;
// After the server has finished running, we can check to see that flush was called.
assert!(*flush_called.lock().unwrap());
}
}
| |
7.js
|
webpackJsonp([7],{418:function(l,n,e){"use strict";function u(l){return a["ɵvid"](0,[(l()(),a["ɵeld"](0,0,null,null,10,"ion-header",[],null,null,null,null,null)),a["ɵdid"](1,16384,null,0,v.a,[C.a,a.ElementRef,a.Renderer,[2,y.a]],null,null),(l()(),a["ɵted"](-1,null,["\n "])),(l()(),a["ɵeld"](3,0,null,null,6,"ion-navbar",[["class","toolbar"]],[[8,"hidden",0],[2,"statusbar-padding",null]],null,null,h.b,h.a)),a["ɵdid"](4,49152,null,0,R.a,[k.a,[2,y.a],[2,_.a],C.a,a.ElementRef,a.Renderer],null,null),(l()(),a["ɵted"](-1,3,["\n "])),(l()(),a["ɵeld"](6,0,null,3,2,"ion-title",[],null,null,null,x.b,x.a)),a["ɵdid"](7,49152,null,0,I.a,[C.a,a.ElementRef,a.Renderer,[2,E.a],[2,R.a]],null,null),(l()(),a["ɵted"](-1,0,["Credit Card Scan"])),(l()(),a["ɵted"](-1,3,["\n "])),(l()(),a["ɵted"](-1,null,["\n"])),(l()(),a["ɵted"](-1,null,["\n"])),(l()(),a["ɵeld"](12,0,null,null,116,"ion-content",[["padding",""]],[[2,"statusbar-padding",null],[2,"has-refresher",null]],null,null,N.b,N.a)),a["ɵdid"](13,4374528,null,0,M.a,[C.a,q.a,D.a,a.ElementRef,a.Renderer,k.a,T.a,a.NgZone,[2,y.a],[2,_.a]],null,null),(l()(),a["ɵted"](-1,1,["\n "])),(l()(),a["ɵeld"](15,0,null,1,1,"p",[],null,null,null,null,null)),(l()(),a["ɵted"](-1,null,["NOTE: THIS ONLY WORKS ON MOBILE!"])),(l()(),a["ɵted"](-1,1,["\n "])),(l()(),a["ɵeld"](18,0,null,1,16,"div",[],null,null,null,null,null)),(l()(),a["ɵted"](-1,null,["\n "])),(l()(),a["ɵeld"](20,0,null,null,0,"img",[],[[8,"src",4]],null,null,null,null)),(l()(),a["ɵted"](-1,null,["\n "])),(l()(),a["ɵeld"](22,0,null,null,11,"ion-fab",[["class","pop-in"],["right",""],["style","top: 23rem;"]],null,null,null,V.b,V.a)),a["ɵdid"](23,278528,null,0,w.NgClass,[a.IterableDiffers,a.KeyValueDiffers,a.ElementRef,a.Renderer2],{klass:[0,"klass"],ngClass:[1,"ngClass"]},null),a["ɵpod"](24,{"pop-out":0}),a["ɵdid"](25,1228800,null,2,P.a,[q.a],null,null),a["ɵqud"](335544320,1,{_mainButton:0}),a["ɵqud"](603979776,2,{_fabLists:1}),(l()(),a["ɵted"](-1,0,["\n "])),(l()(),a["ɵeld"](29,0,null,0,3,"button",[["color","dark"],["ion-fab",""],["mini",""]],null,[[null,"click"]],function(l,n,e){var u=!0;if("click"===n){u=!1!==l.component.scanCard()&&u}return u},j.b,j.a)),a["ɵdid"](30,49152,[[1,4]],0,L.a,[C.a,a.ElementRef,a.Renderer],{color:[0,"color"]},null),(l()(),a["ɵeld"](31,0,null,0,1,"ion-icon",[["name","camera"],["role","img"]],[[2,"hide",null]],null,null,null,null)),a["ɵdid"](32,147456,null,0,O.a,[C.a,a.ElementRef,a.Renderer],{name:[0,"name"]},null),(l()(),a["ɵted"](-1,0,["\n "])),(l()(),a["ɵted"](-1,null,["\n "])),(l()(),a["ɵted"](-1,1,["\n "])),(l()(),a["ɵeld"](36,0,null,1,91,"div",[],null,null,null,null,null)),(l()(),a["ɵted"](-1,null,["\n "])),(l()(),a["ɵeld"](38,0,null,null,16,"ion-item",[["class","item item-block"]],null,null,null,K.b,K.a)),a["ɵdid"](39,1097728,null,3,S.a,[Y.a,C.a,a.ElementRef,a.Renderer,[2,B.a]],null,null),a["ɵqud"](335544320,3,{contentLabel:0}),a["ɵqud"](603979776,4,{_buttons:1}),a["ɵqud"](603979776,5,{_icons:1}),a["ɵdid"](43,16384,null,0,F.a,[],null,null),(l()(),a["ɵted"](-1,2,["\n "])),(l()(),a["ɵeld"](45,0,null,1,2,"ion-label",[],null,null,null,null,null)),a["ɵdid"](46,16384,[[3,4]],0,U.a,[C.a,a.ElementRef,a.Renderer,[8,null],[8,null],[8,null],[8,null]],null,null),(l()(),a["ɵted"](-1,null,["Card Type"])),(l()(),a["ɵted"](-1,2,["\n "])),(l()(),a["ɵeld"](49,0,null,3,4,"ion-input",[["type","text"]],[[2,"ng-untouched",null],[2,"ng-touched",null],[2,"ng-pristine",null],[2,"ng-dirty",null],[2,"ng-valid",null],[2,"ng-invalid",null],[2,"ng-pending",null]],[[null,"ngModelChange"]],function(l,n,e){var 
u=!0;if("ngModelChange"===n){u=!1!==(l.component.card.cardType=e)&&u}return u},G.b,G.a)),a["ɵdid"](50,671744,null,0,A.n,[[8,null],[8,null],[8,null],[8,null]],{model:[0,"model"]},{update:"ngModelChange"}),a["ɵprd"](2048,null,A.j,null,[A.n]),a["ɵdid"](52,16384,null,0,A.k,[A.j],null,null),a["ɵdid"](53,5423104,null,0,W.a,[C.a,q.a,Y.a,k.a,a.ElementRef,a.Renderer,[2,M.a],[2,S.a],[2,A.j],D.a],{type:[0,"type"]},null),(l()(),a["ɵted"](-1,2,["\n "])),(l()(),a["ɵted"](-1,null,["\n "])),(l()(),a["ɵeld"](56,0,null,null,16,"ion-item",[["class","item item-block"]],null,null,null,K.b,K.a)),a["ɵdid"](57,1097728,null,3,S.a,[Y.a,C.a,a.ElementRef,a.Renderer,[2,B.a]],null,null),a["ɵqud"](335544320,6,{contentLabel:0}),a["ɵqud"](603979776,7,{_buttons:1}),a["ɵqud"](603979776,8,{_icons:1}),a["ɵdid"](61,16384,null,0,F.a,[],null,null),(l()(),a["ɵted"](-1,2,["\n "])),(l()(),a["ɵeld"](63,0,null,1,2,"ion-label",[],null,null,null,null,null)),a["ɵdid"](64,16384,[[6,4]],0,U.a,[C.a,a.ElementRef,a.Renderer,[8,null],[8,null],[8,null],[8,null]],null,null),(l()(),a["ɵted"](-1,null,["Card Number"])),(l()(),a["ɵted"](-1,2,["\n "])),(l()(),a["ɵeld"](67,0,null,3,4,"ion-input",[["type","text"]],[[2,"ng-untouched",null],[2,"ng-touched",null],[2,"ng-pristine",null],[2,"ng-dirty",null],[2,"ng-valid",null],[2,"ng-invalid",null],[2,"ng-pending",null]],[[null,"ngModelChange"]],function(l,n,e){var u=!0;if("ngModelChange"===n){u=!1!==(l.component.card.cardNumber=e)&&u}return u},G.b,G.a)),a["ɵdid"](68,671744,null,0,A.n,[[8,null],[8,null],[8,null],[8,null]],{model:[0,"model"]},{update:"ngModelChange"}),a["ɵprd"](2048,null,A.j,null,[A.n]),a["ɵdid"](70,16384,null,0,A.k,[A.j],null,null),a["ɵdid"](71,5423104,null,0,W.a,[C.a,q.a,Y.a,k.a,a.ElementRef,a.Renderer,[2,M.a],[2,S.a],[2,A.j],D.a],{type:[0,"type"]},null),(l()(),a["ɵted"](-1,2,["\n "])),(l()(),a["ɵted"](-1,null,["\n "])),(l()(),a["ɵeld"](74,0,null,null,16,"ion-item",[["class","item item-block"]],null,null,null,K.b,K.a)),a["ɵdid"](75,1097728,null,3,S.a,[Y.a,C.a,a.ElementRef,a.Renderer,[2,B.a]],null,null),a["ɵqud"](335544320,9,{contentLabel:0}),a["ɵqud"](603979776,10,{_buttons:1}),a["ɵqud"](603979776,11,{_icons:1}),a["ɵdid"](79,16384,null,0,F.a,[],null,null),(l()(),a["ɵted"](-1,2,["\n "])),(l()(),a["ɵeld"](81,0,null,1,2,"ion-label",[],null,null,null,null,null)),a["ɵdid"](82,16384,[[9,4]],0,U.a,[C.a,a.ElementRef,a.Renderer,[8,null],[8,null],[8,null],[8,null]],null,null),(l()(),a["ɵted"](-1,null,["Expiry Month"])),(l()(),a["ɵted"](-1,2,["\n "])),(l()(),a["ɵeld"](85,0,null,3,4,"ion-input",[["type","text"]],[[2,"ng-untouched",null],[2,"ng-touched",null],[2,"ng-pristine",null],[2,"ng-dirty",null],[2,"ng-valid",null],[2,"ng-invalid",null],[2,"ng-pending",null]],[[null,"ngModelChange"]],function(l,n,e){var u=!0;if("ngModelChange"===n){u=!1!==(l.component.card.expiryMonth=e)&&u}return u},G.b,G.a)),a["ɵdid"](86,671744,null,0,A.n,[[8,null],[8,null],[8,null],[8,null]],{model:[0,"model"]},{update:"ngModelChange"}),a["ɵprd"](2048,null,A.j,null,[A.n]),a["ɵdid"](88,16384,null,0,A.k,[A.j],null,null),a["ɵdid"](89,5423104,null,0,W.a,[C.a,q.a,Y.a,k.a,a.ElementRef,a.Renderer,[2,M.a],[2,S.a],[2,A.j],D.a],{type:[0,"type"]},null),(l()(),a["ɵted"](-1,2,["\n "])),(l()(),a["ɵted"](-1,null,["\n "])),(l()(),a["ɵeld"](92,0,null,null,16,"ion-item",[["class","item 
item-block"]],null,null,null,K.b,K.a)),a["ɵdid"](93,1097728,null,3,S.a,[Y.a,C.a,a.ElementRef,a.Renderer,[2,B.a]],null,null),a["ɵqud"](335544320,12,{contentLabel:0}),a["ɵqud"](603979776,13,{_buttons:1}),a["ɵqud"](603979776,14,{_icons:1}),a["ɵdid"](97,16384,null,0,F.a,[],null,null),(l()(),a["ɵted"](-1,2,["\n "])),(l()(),a["ɵeld"](99,0,null,1,2,"ion-label",[],null,null,null,null,null)),a["ɵdid"](100,16384,[[12,4]],0,U.a,[C.a,a.ElementRef,a.Renderer,[8,null],[8,null],[8,null],[8,null]],null,null),(l()(),a["ɵted"](-1,null,["Expiry Year"])),(l()(),a["ɵted"](-1,2,["\n "])),(l()(),a["ɵeld"](103,0,null,3,4,"ion-input",[["type","text"]],[[2,"ng-untouched",null],[2,"ng-touched",null],[2,"ng-pristine",null],[2,"ng-dirty",null],[2,"ng-valid",null],[2,"ng-invalid",null],[2,"ng-pending",null]],[[null,"ngModelChange"]],function(l,n,e){var u=!0;if("ngModelChange"===n){u=!1!==(l.component.card.expiryYear=e)&&u}return u},G.b,G.a)),a["ɵdid"](104,671744,null,0,A.n,[[8,null],[8,null],[8,null],[8,null]],{model:[0,"model"]},{update:"ngModelChange"}),a["ɵprd"](2048,null,A.j,null,[A.n]),a["ɵdid"](106,16384,null,0,A.k,[A.j],null,null),a["ɵdid"](107,5423104,null,0,W.a,[C.a,q.a,Y.a,k.a,a.ElementRef,a.Renderer,[2,M.a],[2,S.a],[2,A.j],D.a],{type:[0,"type"]},null),(l()(),a["ɵted"](-1,2,["\n "])),(l()(),a["ɵted"](-1,null,["\n "])),(l()(),a["ɵeld"](110,0,null,null,16,"ion-item",[["class","item item-block"]],null,null,null,K.b,K.a)),a["ɵdid"](111,1097728,null,3,S.a,[Y.a,C.a,a.ElementRef,a.Renderer,[2,B.a]],null,null),a["ɵqud"](335544320,15,{contentLabel:0}),a["ɵqud"](603979776,16,{_buttons:1}),a["ɵqud"](603979776,17,{_icons:1}),a["ɵdid"](115,16384,null,0,F.a,[],null,null),(l()(),a["ɵted"](-1,2,["\n "])),(l()(),a["ɵeld"](117,0,null,1,2,"ion-label",[],null,null,null,null,null)),a["ɵdid"](118,16384,[[15,4]],0,U.a,[C.a,a.ElementRef,a.Renderer,[8,null],[8,null],[8,null],[8,null]],null,null),(l()(),a["ɵted"](-1,null,["CVV"])),(l()(),a["ɵted"](-1,2,["\n "])),(l()(),a["ɵeld"](121,0,null,3,4,"ion-input",[["type","text"]],[[2,"ng-untouched",null],[2,"ng-touched",null],[2,"ng-pristine",null],[2,"ng-dirty",null],[2,"ng-valid",null],[2,"ng-invalid",null],[2,"ng-pending",null]],[[null,"ngModelChange"]],function(l,n,e){var u=!0;if("ngModelChange"===n){u=!1!==(l.component.card.cvv=e)&&u}return u},G.b,G.a)),a["ɵdid"](122,671744,null,0,A.n,[[8,null],[8,null],[8,null],[8,null]],{model:[0,"model"]},{update:"ngModelChange"}),a["ɵprd"](2048,null,A.j,null,[A.n]),a["ɵdid"](124,16384,null,0,A.k,[A.j],null,null),a["ɵdid"](125,5423104,null,0,W.a,[C.a,q.a,Y.a,k.a,a.ElementRef,a.Renderer,[2,M.a],[2,S.a],[2,A.j],D.a],{type:[0,"type"]},null),(l()(),a["ɵted"](-1,2,["\n "])),(l()(),a["ɵted"](-1,null,["\n "])),(l()(),a["ɵted"](-1,1,["\n"])),(l()(),a["ɵted"](-1,null,["\n\n"]))],function(l,n){var e=n.component;l(n,23,0,"pop-in",l(n,24,0,e.fabGone));l(n,30,0,"dark");l(n,32,0,"camera");l(n,50,0,e.card.cardType);l(n,53,0,"text");l(n,68,0,e.card.cardNumber);l(n,71,0,"text");l(n,86,0,e.card.expiryMonth);l(n,89,0,"text");l(n,104,0,e.card.expiryYear);l(n,107,0,"text");l(n,122,0,e.card.cvv);l(n,125,0,"text")},function(l,n){var 
e=n.component;l(n,3,0,a["ɵnov"](n,4)._hidden,a["ɵnov"](n,4)._sbPadding);l(n,12,0,a["ɵnov"](n,13).statusbarPadding,a["ɵnov"](n,13)._hasRefresher);l(n,20,0,e.cardImage);l(n,31,0,a["ɵnov"](n,32)._hidden);l(n,49,0,a["ɵnov"](n,52).ngClassUntouched,a["ɵnov"](n,52).ngClassTouched,a["ɵnov"](n,52).ngClassPristine,a["ɵnov"](n,52).ngClassDirty,a["ɵnov"](n,52).ngClassValid,a["ɵnov"](n,52).ngClassInvalid,a["ɵnov"](n,52).ngClassPending);l(n,67,0,a["ɵnov"](n,70).ngClassUntouched,a["ɵnov"](n,70).ngClassTouched,a["ɵnov"](n,70).ngClassPristine,a["ɵnov"](n,70).ngClassDirty,a["ɵnov"](n,70).ngClassValid,a["ɵnov"](n,70).ngClassInvalid,a["ɵnov"](n,70).ngClassPending);l(n,85,0,a["ɵnov"](n,88).ngClassUntouched,a["ɵnov"](n,88).ngClassTouched,a["ɵnov"](n,88).ngClassPristine,a["ɵnov"](n,88).ngClassDirty,a["ɵnov"](n,88).ngClassValid,a["ɵnov"](n,88).ngClassInvalid,a["ɵnov"](n,88).ngClassPending);l(n,103,0,a["ɵnov"](n,106).ngClassUntouched,a["ɵnov"](n,106).ngClassTouched,a["ɵnov"](n,106).ngClassPristine,a["ɵnov"](n,106).ngClassDirty,a["ɵnov"](n,106).ngClassValid,a["ɵnov"](n,106).ngClassInvalid,a["ɵnov"](n,106).ngClassPending);l(n,121,0,a["ɵnov"](n,124).ngClassUntouched,a["ɵnov"](n,124).ngClassTouched,a["ɵnov"](n,124).ngClassPristine,a["ɵnov"](n,124).ngClassDirty,a["ɵnov"](n,124).ngClassValid,a["ɵnov"](n,124).ngClassInvalid,a["ɵnov"](n,124).ngClassPending)})}Object.defineProperty(n,"__esModule",{value:!0});var a=e(0),t=(e(3),e(155)),d=(e(27),function(){function l(l,n){this.navCtrl=l,this.cardIO=n,this.cardImage="assets/img/misc/credit-card.png",this.card={cardType:"",cardNumber:"",redactedCardNumber:"",expiryMonth:null,expiryYear:null,cvv:"",postalCode:""},this.fabGone=!1}return l.prototype.ionViewDidLoad=function(){console.log("Hello CreditCardScan Page")},l.prototype.scanCard=function(){var l=this;this.cardIO.canScan().then(function(n){if(n){l.cardIO.scan({scanExpiry:!0,hideCardIOLogo:!0,scanInstructions:"Please position your card inside the frame",keepApplicationTheme:!0,requireCCV:!0,requireExpiry:!0,requirePostalCode:!1}).then(function(n){console.log("Scan complete");l.card={cardType:n.cardType,cardNumber:n.cardNumber,redactedCardNumber:n.redactedCardNumber,expiryMonth:n.expiryMonth,expiryYear:n.expiryYear,cvv:n.cvv,postalCode:n.postalCode}})}})},l.prototype.ionViewWillEnter=function(){this.fabGone=!1},l.prototype.ionViewWillLeave=function(){this.fabGone=!0},l}()),o=function(){return function(){}}(),i=e(346),r=e(347),s=e(348),c=e(349),f=e(350),p=e(351),g=e(352),m=e(353),b=e(354),v=e(78),C=e(2),y=e(6),h=e(497),R=e(37),k=e(9),_=e(20),x=e(355),I=e(57),E=e(38),N=e(345),M=e(22),q=e(5),D=e(10),T=e(25),V=e(502),w=e(13),P=e(138),j=e(500),L=e(59),O=e(41),K=e(127),S=e(17),Y=e(15),B=e(43),F=e(58),U=e(49),G=e(501),A=e(19),W=e(133),z=a["ɵcrt"]({encapsulation:2,styles:[],data:{}}),H=a["ɵccf"]("page-credit-card-scan",d,function(l){return a["ɵvid"](0,[(l()(),a["ɵeld"](0,0,null,null,1,"page-credit-card-scan",[],null,null,null,u,z)),a["ɵdid"](1,49152,null,0,d,[_.a,t.a],null,null)],null,null)},{},{},[]),J=e(126),Z=e(42);e.d(n,"CreditCardScanPageModuleNgFactory",function(){return Q});var Q=a["ɵcmf"](o,[],function(l){return 
a["ɵmod"]([a["ɵmpd"](512,a.ComponentFactoryResolver,a["ɵCodegenComponentFactoryResolver"],[[8,[i.a,r.a,s.a,c.a,f.a,p.a,g.a,m.a,b.a,H]],[3,a.ComponentFactoryResolver],a.NgModuleRef]),a["ɵmpd"](4608,w.NgLocalization,w.NgLocaleLocalization,[a.LOCALE_ID,[2,w["ɵa"]]]),a["ɵmpd"](4608,A.s,A.s,[]),a["ɵmpd"](4608,A.d,A.d,[]),a["ɵmpd"](512,w.CommonModule,w.CommonModule,[]),a["ɵmpd"](512,A.q,A.q,[]),a["ɵmpd"](512,A.h,A.h,[]),a["ɵmpd"](512,A.o,A.o,[]),a["ɵmpd"](512,J.a,J.a,[]),a["ɵmpd"](512,J.b,J.b,[]),a["ɵmpd"](512,o,o,[]),a["ɵmpd"](256,Z.a,d,[])])})},497:function(l,n,e){"use strict";function u(l){return a["ɵvid"](0,[(l()(),a["ɵeld"](0,0,null,null,1,"div",[["class","toolbar-background"]],null,null,null,null,null)),a["ɵdid"](1,278528,null,0,t.NgClass,[a.IterableDiffers,a.KeyValueDiffers,a.ElementRef,a.Renderer2],{klass:[0,"klass"],ngClass:[1,"ngClass"]},null),(l()(),a["ɵeld"](2,0,null,null,8,"button",[["class","back-button"],["ion-button","bar-button"]],[[8,"hidden",0]],[[null,"click"]],function(l,n,e){var u=!0;if("click"===n){u=!1!==l.component.backButtonClick(e)&&u}return u},d.b,d.a)),a["ɵdid"](3,278528,null,0,t.NgClass,[a.IterableDiffers,a.KeyValueDiffers,a.ElementRef,a.Renderer2],{klass:[0,"klass"],ngClass:[1,"ngClass"]},null),a["ɵdid"](4,1097728,null,0,o.a,[[8,"bar-button"],i.a,a.ElementRef,a.Renderer],null,null),(l()(),a["ɵeld"](5,0,null,0,2,"ion-icon",[["class","back-button-icon"],["role","img"]],[[2,"hide",null]],null,null,null,null)),a["ɵdid"](6,278528,null,0,t.NgClass,[a.IterableDiffers,a.KeyValueDiffers,a.ElementRef,a.Renderer2],{klass:[0,"klass"],ngClass:[1,"ngClass"]},null),a["ɵdid"](7,147456,null,0,r.a,[i.a,a.ElementRef,a.Renderer],{name:[0,"name"]},null),(l()(),a["ɵeld"](8,0,null,0,2,"span",[["class","back-button-text"]],null,null,null,null,null)),a["ɵdid"](9,278528,null,0,t.NgClass,[a.IterableDiffers,a.KeyValueDiffers,a.ElementRef,a.Renderer2],{klass:[0,"klass"],ngClass:[1,"ngClass"]},null),(l()(),a["ɵted"](10,null,["",""])),a["ɵncd"](null,0),a["ɵncd"](null,1),a["ɵncd"](null,2),(l()(),a["ɵeld"](14,0,null,null,2,"div",[["class","toolbar-content"]],null,null,null,null,null)),a["ɵdid"](15,278528,null,0,t.NgClass,[a.IterableDiffers,a.KeyValueDiffers,a.ElementRef,a.Renderer2],{klass:[0,"klass"],ngClass:[1,"ngClass"]},null),a["ɵncd"](null,3)],function(l,n){var e=n.component;l(n,1,0,"toolbar-background","toolbar-background-"+e._mode);l(n,3,0,"back-button","back-button-"+e._mode);l(n,6,0,"back-button-icon","back-button-icon-"+e._mode);l(n,7,0,e._bbIcon);l(n,9,0,"back-button-text","back-button-text-"+e._mode);l(n,15,0,"toolbar-content","toolbar-content-"+e._mode)},function(l,n){var e=n.component;l(n,2,0,e._hideBb);l(n,5,0,a["ɵnov"](n,7)._hidden);l(n,10,0,e._backText)})}e.d(n,"a",function(){return s}),n.b=u;var a=e(0),t=e(13),d=e(47),o=e(21),i=e(2),r=e(41),s=(e(6),e(20),a["ɵcrt"]({encapsulation:2,styles:[],data:{}}))},500:function(l,n,e){"use strict";function u(l){return a["ɵvid"](2,[(l()(),a["ɵeld"](0,0,null,null,1,"ion-icon",[["class","fab-close-icon"],["name","close"],["role","img"]],[[2,"hide",null]],null,null,null,null)),a["ɵdid"](1,147456,null,0,t.a,[d.a,a.ElementRef,a.Renderer],{name:[0,"name"]},null),(l()(),a["ɵeld"](2,0,null,null,1,"span",[["class","button-inner"]],null,null,null,null,null)),a["ɵncd"](null,0),(l()(),a["ɵeld"](4,0,null,null,0,"div",[["class","button-effect"]],null,null,null,null,null))],function(l,n){l(n,1,0,"close")},function(l,n){l(n,0,0,a["ɵnov"](n,1)._hidden)})}e.d(n,"a",function(){return o}),n.b=u;var 
a=e(0),t=e(41),d=e(2),o=a["ɵcrt"]({encapsulation:2,styles:[],data:{}})},501:function(l,n,e){"use strict";function u(l){return i["ɵvid"](0,[(l()(),i["ɵeld"](0,0,[[1,0],["textInput",1]],null,1,"input",[["class","text-input"],["dir","auto"]],[[8,"type",0],[1,"aria-labelledby",0],[1,"min",0],[1,"max",0],[1,"step",0],[1,"autocomplete",0],[1,"autocorrect",0],[8,"placeholder",0],[8,"disabled",0],[8,"readOnly",0]],[[null,"input"],[null,"blur"],[null,"focus"],[null,"keydown"]],function(l,n,e){var u=!0,a=l.component;if("input"===n){u=!1!==a.onInput(e)&&u}if("blur"===n){u=!1!==a.onBlur(e)&&u}if("focus"===n){u=!1!==a.onFocus(e)&&u}if("keydown"===n){u=!1!==a.onKeydown(e)&&u}return u},null,null)),i["ɵdid"](1,278528,null,0,r.NgClass,[i.IterableDiffers,i.KeyValueDiffers,i.ElementRef,i.Renderer2],{klass:[0,"klass"],ngClass:[1,"ngClass"]},null)],function(l,n){l(n,1,0,"text-input","text-input-"+n.component._mode)},function(l,n){var e=n.component;l(n,0,0,e._type,e._labelId,e.min,e.max,e.step,e.autocomplete,e.autocorrect,e.placeholder,e._disabled,e._readonly)})}function a(l){return i["ɵvid"](0,[(l()(),i["ɵeld"](0,0,[[1,0],["textInput",1]],null,1,"textarea",[["class","text-input"]],[[1,"aria-labelledby",0],[1,"autocomplete",0],[1,"autocorrect",0],[8,"placeholder",0],[8,"disabled",0],[8,"readOnly",0]],[[null,"input"],[null,"blur"],[null,"focus"],[null,"keydown"]],function(l,n,e){var u=!0,a=l.component;if("input"===n){u=!1!==a.onInput(e)&&u}if("blur"===n){u=!1!==a.onBlur(e)&&u}if("focus"===n){u=!1!==a.onFocus(e)&&u}if("keydown"===n){u=!1!==a.onKeydown(e)&&u}return u},null,null)),i["ɵdid"](1,278528,null,0,r.NgClass,[i.IterableDiffers,i.KeyValueDiffers,i.ElementRef,i.Renderer2],{klass:[0,"klass"],ngClass:[1,"ngClass"]},null)],function(l,n){l(n,1,0,"text-input","text-input-"+n.component._mode)},function(l,n){var e=n.component;l(n,0,0,e._labelId,e.autocomplete,e.autocorrect,e.placeholder,e._disabled,e._readonly)})}function t(l){return i["ɵvid"](0,[(l()(),i["ɵeld"](0,0,null,null,1,"button",[["class","text-input-clear-icon"],["clear",""],["ion-button",""],["tabindex","-1"],["type","button"]],null,[[null,"click"],[null,"mousedown"]],function(l,n,e)
|
var u=!0,a=l.component;if("click"===n){u=!1!==a.clearTextInput(e)&&u}if("mousedown"===n){u=!1!==a.clearTextInput(e)&&u}return u},s.b,s.a)),i["ɵdid"](1,1097728,null,0,c.a,[[8,""],f.a,i.ElementRef,i.Renderer],{clear:[0,"clear"]},null)],function(l,n){l(n,1,0,"")},null)}function d(l){return i["ɵvid"](0,[(l()(),i["ɵeld"](0,0,null,null,0,"div",[["class","input-cover"]],null,[[null,"touchstart"],[null,"touchend"],[null,"mousedown"],[null,"mouseup"]],function(l,n,e){var u=!0,a=l.component;if("touchstart"===n){u=!1!==a._pointerStart(e)&&u}if("touchend"===n){u=!1!==a._pointerEnd(e)&&u}if("mousedown"===n){u=!1!==a._pointerStart(e)&&u}if("mouseup"===n){u=!1!==a._pointerEnd(e)&&u}return u},null,null))],null,null)}function o(l){return i["ɵvid"](2,[i["ɵqud"](671088640,1,{_native:0}),(l()(),i["ɵand"](16777216,null,null,1,null,u)),i["ɵdid"](2,16384,null,0,r.NgIf,[i.ViewContainerRef,i.TemplateRef],{ngIf:[0,"ngIf"]},null),(l()(),i["ɵand"](16777216,null,null,1,null,a)),i["ɵdid"](4,16384,null,0,r.NgIf,[i.ViewContainerRef,i.TemplateRef],{ngIf:[0,"ngIf"]},null),(l()(),i["ɵand"](16777216,null,null,1,null,t)),i["ɵdid"](6,16384,null,0,r.NgIf,[i.ViewContainerRef,i.TemplateRef],{ngIf:[0,"ngIf"]},null),(l()(),i["ɵand"](16777216,null,null,1,null,d)),i["ɵdid"](8,16384,null,0,r.NgIf,[i.ViewContainerRef,i.TemplateRef],{ngIf:[0,"ngIf"]},null)],function(l,n){var e=n.component;l(n,2,0,!e._isTextarea);l(n,4,0,e._isTextarea);l(n,6,0,e._clearInput);l(n,8,0,e._useAssist)},null)}e.d(n,"a",function(){return p}),n.b=o;var i=e(0),r=e(13),s=e(47),c=e(21),f=e(2),p=(e(5),e(15),e(10),i["ɵcrt"]({encapsulation:2,styles:[],data:{}}))},502:function(l,n,e){"use strict";function u(l){return a["ɵvid"](0,[a["ɵncd"](null,0)],null,null)}e.d(n,"a",function(){return t}),n.b=u;var a=e(0),t=(e(5),a["ɵcrt"]({encapsulation:2,styles:[],data:{}}))}});
|
{
|
testing.py
|
# -*- coding: utf-8 -*-
"""
flask.testing
~~~~~~~~~~~~~
Implements test support helpers. This module is lazily imported
and usually not used in production environments.
:copyright: (c) 2015 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import werkzeug
from contextlib import contextmanager
from werkzeug.test import Client, EnvironBuilder
from flask import _request_ctx_stack
from flask.json import dumps as json_dumps
try:
from werkzeug.urls import url_parse
except ImportError:
from urlparse import urlsplit as url_parse
def make_test_environ_builder(
app, path='/', base_url=None, subdomain=None, url_scheme=None,
*args, **kwargs
):
"""Creates a new test builder with some application defaults thrown in."""
assert (
not (base_url or subdomain or url_scheme)
or (base_url is not None) != bool(subdomain or url_scheme)
), 'Cannot pass "subdomain" or "url_scheme" with "base_url".'
if base_url is None:
http_host = app.config.get('SERVER_NAME') or 'localhost'
app_root = app.config['APPLICATION_ROOT']
if subdomain:
http_host = '{0}.{1}'.format(subdomain, http_host)
if url_scheme is None:
url_scheme = app.config['PREFERRED_URL_SCHEME']
url = url_parse(path)
base_url = '{0}://{1}/{2}'.format(
url_scheme, url.netloc or http_host, app_root.lstrip('/')
)
path = url.path
if url.query:
sep = b'?' if isinstance(url.query, bytes) else '?'
path += sep + url.query
if 'json' in kwargs:
assert 'data' not in kwargs, (
"Client cannot provide both 'json' and 'data'."
)
# push a context so flask.json can use app's json attributes
with app.app_context():
kwargs['data'] = json_dumps(kwargs.pop('json'))
if 'content_type' not in kwargs:
kwargs['content_type'] = 'application/json'
return EnvironBuilder(path, base_url, *args, **kwargs)
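# Illustrative only: a minimal sketch of how the builder above might be used,
# assuming an existing Flask app object named `app` (not defined in this module).
#
#     builder = make_test_environ_builder(app, '/items?page=2', subdomain='api')
#     environ = builder.get_environ()
#     # The base URL is derived from SERVER_NAME / APPLICATION_ROOT plus the
#     # 'api' subdomain, and the query string stays attached to the path.
#     # Passing base_url together with subdomain/url_scheme trips the assert above.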
class FlaskClient(Client):
"""Works like a regular Werkzeug test client but has some knowledge about
how Flask works to defer the cleanup of the request context stack to the
end of a ``with`` body when used in a ``with`` statement. For general
information about how to use this class refer to
:class:`werkzeug.test.Client`.
.. versionchanged:: 0.12
`app.test_client()` includes preset default environment, which can be
set after instantiation of the `app.test_client()` object in
`client.environ_base`.
Basic usage is outlined in the :ref:`testing` chapter.
"""
preserve_context = False
def __init__(self, *args, **kwargs):
super(FlaskClient, self).__init__(*args, **kwargs)
self.environ_base = {
"REMOTE_ADDR": "127.0.0.1",
"HTTP_USER_AGENT": "werkzeug/" + werkzeug.__version__
}
@contextmanager
def session_transaction(self, *args, **kwargs):
"""When used in combination with a ``with`` statement this opens a
session transaction. This can be used to modify the session that
the test client uses. Once the ``with`` block is left the session is
stored back.
::
with client.session_transaction() as session:
session['value'] = 42
Internally this is implemented by going through a temporary test
request context and since session handling could depend on
request variables this function accepts the same arguments as
:meth:`~flask.Flask.test_request_context` which are directly
passed through.
"""
if self.cookie_jar is None:
raise RuntimeError('Session transactions only make sense '
'with cookies enabled.')
app = self.application
environ_overrides = kwargs.setdefault('environ_overrides', {})
self.cookie_jar.inject_wsgi(environ_overrides)
outer_reqctx = _request_ctx_stack.top
with app.test_request_context(*args, **kwargs) as c:
session_interface = app.session_interface
sess = session_interface.open_session(app, c.request)
if sess is None:
raise RuntimeError('Session backend did not open a session. '
'Check the configuration')
# Since we have to open a new request context for the session
            # handling we want to make sure that we hide our own context
# from the caller. By pushing the original request context
# (or None) on top of this and popping it we get exactly that
# behavior. It's important to not use the push and pop
# methods of the actual request context object since that would
# mean that cleanup handlers are called
_request_ctx_stack.push(outer_reqctx)
try:
yield sess
finally:
_request_ctx_stack.pop()
resp = app.response_class()
if not session_interface.is_null_session(sess):
session_interface.save_session(app, sess, resp)
headers = resp.get_wsgi_headers(c.request.environ)
self.cookie_jar.extract_wsgi(c.request.environ, headers)
def open(self, *args, **kwargs):
kwargs.setdefault('environ_overrides', {}) \
['flask._preserve_context'] = self.preserve_context
kwargs.setdefault('environ_base', self.environ_base)
as_tuple = kwargs.pop('as_tuple', False)
buffered = kwargs.pop('buffered', False)
follow_redirects = kwargs.pop('follow_redirects', False)
builder = make_test_environ_builder(self.application, *args, **kwargs)
return Client.open(self, builder,
as_tuple=as_tuple,
buffered=buffered,
follow_redirects=follow_redirects)
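    # A small usage sketch (the route and payload are hypothetical): because
    # `open` funnels its kwargs into make_test_environ_builder, a `json=`
    # keyword is serialized with the app's JSON settings and the content type
    # defaults to 'application/json'.
    #
    #     with app.test_client() as client:
    #         rv = client.post('/api/items', json={'name': 'x'})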
def __enter__(self):
|
def __exit__(self, exc_type, exc_value, tb):
self.preserve_context = False
# on exit we want to clean up earlier. Normally the request context
# stays preserved until the next request in the same thread comes
# in. See RequestGlobals.push() for the general behavior.
top = _request_ctx_stack.top
if top is not None and top.preserved:
top.pop()
|
if self.preserve_context:
raise RuntimeError('Cannot nest client invocations')
self.preserve_context = True
return self
|
classes_request_builder.go
|
package classes
import (
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be "github.com/microsoftgraph/msgraph-beta-sdk-go/models"
i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459 "github.com/microsoftgraph/msgraph-beta-sdk-go/models/odataerrors"
i3d74e6f522ebcf67a89d51ffa78066eadb31a2c7d004b2f456cb012d97d17b3b "github.com/microsoftgraph/msgraph-beta-sdk-go/education/schools/item/classes/count"
)
// ClassesRequestBuilder provides operations to manage the classes property of the microsoft.graph.educationSchool entity.
type ClassesRequestBuilder struct {
// Path parameters for the request
pathParameters map[string]string
// The request adapter to use to execute the requests.
requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter
// Url template to use to build the URL for the current request builder
urlTemplate string
}
// ClassesRequestBuilderGetQueryParameters classes taught at the school. Nullable.
type ClassesRequestBuilderGetQueryParameters struct {
// Include count of items
Count *bool `uriparametername:"%24count"`
// Expand related entities
Expand []string `uriparametername:"%24expand"`
// Filter items by property values
Filter *string `uriparametername:"%24filter"`
// Order items by property values
Orderby []string `uriparametername:"%24orderby"`
// Search items by search phrases
Search *string `uriparametername:"%24search"`
// Select properties to be returned
Select []string `uriparametername:"%24select"`
// Skip the first n items
Skip *int32 `uriparametername:"%24skip"`
// Show only the first n items
Top *int32 `uriparametername:"%24top"`
}
// ClassesRequestBuilderGetRequestConfiguration configuration for the request such as headers, query parameters, and middleware options.
type ClassesRequestBuilderGetRequestConfiguration struct {
// Request headers
Headers map[string]string
// Request options
Options []i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestOption
// Request query parameters
QueryParameters *ClassesRequestBuilderGetQueryParameters
}
// NewClassesRequestBuilderInternal instantiates a new ClassesRequestBuilder and sets the default values.
func NewClassesRequestBuilderInternal(pathParameters map[string]string, requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter)(*ClassesRequestBuilder) {
m := &ClassesRequestBuilder{
}
m.urlTemplate = "{+baseurl}/education/schools/{educationSchool%2Did}/classes{?%24top,%24skip,%24search,%24filter,%24count,%24orderby,%24select,%24expand}";
urlTplParams := make(map[string]string)
for idx, item := range pathParameters {
urlTplParams[idx] = item
}
m.pathParameters = urlTplParams;
m.requestAdapter = requestAdapter;
return m
}
// NewClassesRequestBuilder instantiates a new ClassesRequestBuilder and sets the default values.
func NewClassesRequestBuilder(rawUrl string, requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter)(*ClassesRequestBuilder)
|
// Count the count property
func (m *ClassesRequestBuilder) Count()(*i3d74e6f522ebcf67a89d51ffa78066eadb31a2c7d004b2f456cb012d97d17b3b.CountRequestBuilder) {
return i3d74e6f522ebcf67a89d51ffa78066eadb31a2c7d004b2f456cb012d97d17b3b.NewCountRequestBuilderInternal(m.pathParameters, m.requestAdapter);
}
// CreateGetRequestInformation classes taught at the school. Nullable.
func (m *ClassesRequestBuilder) CreateGetRequestInformation()(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {
return m.CreateGetRequestInformationWithRequestConfiguration(nil);
}
// CreateGetRequestInformationWithRequestConfiguration classes taught at the school. Nullable.
func (m *ClassesRequestBuilder) CreateGetRequestInformationWithRequestConfiguration(requestConfiguration *ClassesRequestBuilderGetRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {
requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()
requestInfo.UrlTemplate = m.urlTemplate
requestInfo.PathParameters = m.pathParameters
requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.GET
if requestConfiguration != nil {
if requestConfiguration.QueryParameters != nil {
requestInfo.AddQueryParameters(*(requestConfiguration.QueryParameters))
}
requestInfo.AddRequestHeaders(requestConfiguration.Headers)
requestInfo.AddRequestOptions(requestConfiguration.Options)
}
return requestInfo, nil
}
// Get classes taught at the school. Nullable.
func (m *ClassesRequestBuilder) Get()(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.EducationClassCollectionResponseable, error) {
return m.GetWithRequestConfigurationAndResponseHandler(nil, nil);
}
// GetWithRequestConfigurationAndResponseHandler classes taught at the school. Nullable.
func (m *ClassesRequestBuilder) GetWithRequestConfigurationAndResponseHandler(requestConfiguration *ClassesRequestBuilderGetRequestConfiguration, responseHandler i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ResponseHandler)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.EducationClassCollectionResponseable, error) {
requestInfo, err := m.CreateGetRequestInformationWithRequestConfiguration(requestConfiguration);
if err != nil {
return nil, err
}
errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {
"4XX": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,
"5XX": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,
}
res, err := m.requestAdapter.SendAsync(requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateEducationClassCollectionResponseFromDiscriminatorValue, responseHandler, errorMapping)
if err != nil {
return nil, err
}
return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.EducationClassCollectionResponseable), nil
}
|
{
urlParams := make(map[string]string)
urlParams["request-raw-url"] = rawUrl
return NewClassesRequestBuilderInternal(urlParams, requestAdapter)
}
|
taxiAvailability.py
|
import requests
import os
ROOT_URL = 'http://datamall2.mytransport.sg/ltaodataservice'
def
|
():
result = None
try:
url = '{}/Taxi-Availability'.format(ROOT_URL)
headers = {
'AccountKey': os.getenv('ACCOUNT_KEY'),
'Accept': 'application/json'
}
response = requests.get(url, headers=headers)
print('response status = ', response.status_code)
print('response json = ', response.json())
if response.status_code == 200:
result = response.json()
except Exception as e:
print('error = ', e)
return result
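# Usage sketch (assumes ACCOUNT_KEY is exported in the environment; the helper
# name matches the function defined above):
#
#     data = get_taxi_availability_request()
#     if data is not None:
#         # The DataMall response is expected to carry the records under 'value'.
#         print(len(data.get('value', [])))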
|
get_taxi_availability_request
|
getAgentPool.ts
|
// *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
import * as pulumi from "@pulumi/pulumi";
import { input as inputs, output as outputs, enums } from "../../types";
import * as utilities from "../../utilities";
/**
* Agent Pool.
*/
export function getAgentPool(args: GetAgentPoolArgs, opts?: pulumi.InvokeOptions): Promise<GetAgentPoolResult> {
if (!opts) {
opts = {}
}
if (!opts.version) {
opts.version = utilities.getVersion();
}
return pulumi.runtime.invoke("azure-native:containerservice/v20201201:getAgentPool", {
"agentPoolName": args.agentPoolName,
"resourceGroupName": args.resourceGroupName,
"resourceName": args.resourceName,
}, opts);
}
export interface GetAgentPoolArgs {
/**
* The name of the agent pool.
*/
readonly agentPoolName: string;
/**
* The name of the resource group.
*/
readonly resourceGroupName: string;
/**
* The name of the managed cluster resource.
*/
readonly resourceName: string;
}
/**
* Agent Pool.
*/
export interface GetAgentPoolResult {
/**
* Availability zones for nodes. Must use VirtualMachineScaleSets AgentPoolType.
*/
readonly availabilityZones?: string[];
/**
* Number of agents (VMs) to host docker containers. Allowed values must be in the range of 0 to 100 (inclusive) for user pools and in the range of 1 to 100 (inclusive) for system pools. The default value is 1.
*/
readonly count?: number;
/**
* Whether to enable auto-scaler
*/
readonly enableAutoScaling?: boolean;
/**
* Whether to enable EncryptionAtHost
*/
readonly enableEncryptionAtHost?: boolean;
/**
* Enable public IP for nodes
*/
readonly enableNodePublicIP?: boolean;
/**
* Resource ID.
*/
readonly id: string;
/**
* KubeletConfig specifies the configuration of kubelet on agent nodes.
*/
readonly kubeletConfig?: outputs.containerservice.v20201201.KubeletConfigResponse;
/**
* KubeletDiskType determines the placement of emptyDir volumes, container runtime data root, and Kubelet ephemeral storage. Currently allows one value, OS, resulting in Kubelet using the OS disk for data.
*/
readonly kubeletDiskType?: string;
/**
* LinuxOSConfig specifies the OS configuration of linux agent nodes.
*/
readonly linuxOSConfig?: outputs.containerservice.v20201201.LinuxOSConfigResponse;
/**
* Maximum number of nodes for auto-scaling
*/
readonly maxCount?: number;
/**
* Maximum number of pods that can run on a node.
*/
readonly maxPods?: number;
/**
* Minimum number of nodes for auto-scaling
*/
readonly minCount?: number;
/**
* AgentPoolMode represents mode of an agent pool
*/
readonly mode?: string;
/**
* The name of the resource that is unique within a resource group. This name can be used to access the resource.
*/
readonly name: string;
/**
* Version of node image
*/
readonly nodeImageVersion: string;
/**
* Agent pool node labels to be persisted across all nodes in agent pool.
*/
readonly nodeLabels?: {[key: string]: string};
/**
* Taints added to new nodes during node pool create and scale. For example, key=value:NoSchedule.
*/
readonly nodeTaints?: string[];
/**
* Version of orchestrator specified when creating the managed cluster.
*/
readonly orchestratorVersion?: string;
/**
* OS Disk Size in GB to be used to specify the disk size for every machine in this master/agent pool. If you specify 0, it will apply the default osDisk size according to the vmSize specified.
*/
readonly osDiskSizeGB?: number;
/**
* OS disk type to be used for machines in a given agent pool. Allowed values are 'Ephemeral' and 'Managed'. Defaults to 'Managed'. May not be changed after creation.
*/
readonly osDiskType?: string;
/**
* OsType to be used to specify os type. Choose from Linux and Windows. Default to Linux.
*/
readonly osType?: string;
/**
* Pod SubnetID specifies the VNet's subnet identifier for pods.
*/
readonly podSubnetID?: string;
/**
* Describes whether the Agent Pool is Running or Stopped
*/
readonly powerState: outputs.containerservice.v20201201.PowerStateResponse;
/**
|
/**
* The ID for Proximity Placement Group.
*/
readonly proximityPlacementGroupID?: string;
/**
* ScaleSetEvictionPolicy to be used to specify eviction policy for Spot virtual machine scale set. Default to Delete.
*/
readonly scaleSetEvictionPolicy?: string;
/**
* ScaleSetPriority to be used to specify virtual machine scale set priority. Default to regular.
*/
readonly scaleSetPriority?: string;
/**
* SpotMaxPrice to be used to specify the maximum price you are willing to pay in US Dollars. Possible values are any decimal value greater than zero or -1 which indicates default price to be up-to on-demand.
*/
readonly spotMaxPrice?: number;
/**
* Agent pool tags to be persisted on the agent pool virtual machine scale set.
*/
readonly tags?: {[key: string]: string};
/**
* AgentPoolType represents types of an agent pool
*/
readonly type: string;
/**
* Settings for upgrading the agentpool
*/
readonly upgradeSettings?: outputs.containerservice.v20201201.AgentPoolUpgradeSettingsResponse;
/**
* Size of agent VMs.
*/
readonly vmSize?: string;
/**
* VNet SubnetID specifies the VNet's subnet identifier for nodes and maybe pods
*/
readonly vnetSubnetID?: string;
}
|
* The current deployment or provisioning state, which only appears in the response.
*/
readonly provisioningState: string;
|
main.js
|
/* ========================================================================
* DOM-based Routing
* Based on http://goo.gl/EUTi53 by Paul Irish
*
* Only fires on body classes that match. If a body class contains a dash,
* replace the dash with an underscore when adding it to the object below.
*
* .noConflict()
* The routing is enclosed within an anonymous function so that you can
* always reference jQuery with $, even when in .noConflict() mode.
* ======================================================================== */
(function($) {
// Use this variable to set up the common and page specific functions. If you
// rename this variable, you will also need to rename the namespace below.
var Sage = {
// All pages
'common': {
init: function() {
$(document).ready( function () {
var body = $("body"),
navOverlay = $(".overlay"),
nav = $("nav#off-canvas-nav"),
navIcon = $("#nav-icon"),
subMenu = $("li.menu-item-has-children > a");
navIcon.click(function(e) {
e.preventDefault();
$(this).toggleClass("open");
body.toggleClass("nav-open");
nav.toggleClass("open");
});
navOverlay.click(function() {
navIcon.removeClass("open");
body.removeClass("nav-open");
nav.removeClass("open");
});
subMenu.click(function(drop) {
drop.preventDefault();
$(this).parent().toggleClass("open");
});
$.ajax({
url: 'http://api.openweathermap.org/data/2.5/weather?lat=33.62&lon=-117.93&units=imperial&appid=9951bddf3af7e21abdb61ad50b4325a2',
method: 'GET'
}).done( function (response) {
console.log(response);
// function jsUcfirst(string) {
// return string.charAt(0).toUpperCase() + string.slice(1);
// } [33.62,-117.93]
function
|
(string) {
var icon;
switch(string) {
case 'Clouds':
icon = 'fa-cloud';
break;
case 'Clear':
icon = 'fa-sun-o';
break;
}
return icon;
}
$('.weather').append(
'<h5 class="white">Newport Beach<img src="http://openweathermap.org/img/w/' + response.weather[0].icon + '.png"></h5>' +
'<p class="white">Forecast: ' + response.weather[0].main +
'<br>T: ' + response.main.temp + '° | ' +
'RH: ' + response.main.humidity + '%</p>'
);
});
});//end ready
},
finalize: function() {
// JavaScript to be fired on all pages, after page specific JS is fired
}
},
// Home page
'home': {
init: function() {
var a = 0;
$(window).scroll(function(){
if ($('.hero').length !== 0) {
if($(window).scrollTop() >= 400) {
$('header.header').addClass('blue');
} else {
$('header.header').removeClass('blue');
}
}
var oTop = $('.big').offset().top - window.innerHeight;
if (a === 0 && $(window).scrollTop() > oTop) {
$('.big').each(function() {
var $this = $(this),
countTo = $this.attr('data-count');
$({
countNum: $this.text()
}).animate({
countNum: countTo
},
{
duration: 2000,
easing: 'swing',
step: function() {
$this.text(Math.floor(this.countNum));
},
complete: function() {
$this.text(this.countNum);
//alert('finished');
}
});
});
a = 1;
}
});//end scroll
},
finalize: function() {
// JavaScript to be fired on the home page, after the init JS
}
},
// About us page, note the change from about-us to about_us.
'about_us': {
init: function() {
// JavaScript to be fired on the about us page
}
}
};
// The routing fires all common scripts, followed by the page specific scripts.
// Add additional events for more control over timing e.g. a finalize event
var UTIL = {
fire: function(func, funcname, args) {
var fire;
var namespace = Sage;
funcname = (funcname === undefined) ? 'init' : funcname;
fire = func !== '';
fire = fire && namespace[func];
fire = fire && typeof namespace[func][funcname] === 'function';
if (fire) {
namespace[func][funcname](args);
}
},
loadEvents: function() {
// Fire common init JS
UTIL.fire('common');
// Fire page-specific init JS, and then finalize JS
$.each(document.body.className.replace(/-/g, '_').split(/\s+/), function(i, classnm) {
UTIL.fire(classnm);
UTIL.fire(classnm, 'finalize');
});
// Fire common finalize JS
UTIL.fire('common', 'finalize');
}
};
// Load Events
$(document).ready(UTIL.loadEvents);
})(jQuery); // Fully reference jQuery after this point.
|
icon
|
utils.js
|
const utils = {};
|
utils.recursiveChildFinder = (parentArray = [], allChildren = []) => {
const baseWithChildren = parentArray.map(each => {
const child_menu = allChildren.filter(child => '' + child.parent_menu === '' + each._id) || [];
return { ...each, child_menu };
});
let baseParentIds = [];
baseWithChildren.map(eachBase => {
baseParentIds = [...baseParentIds, ...eachBase.child_menu.map(each => '' + each._id)];
});
if (baseParentIds.length === 0) {
return baseWithChildren;
}
return baseWithChildren.map(each => ({
...each,
child_menu: utils.recursiveChildFinder(each.child_menu, allChildren),
}));
};
module.exports = utils;
| |
utils.go
|
package cmd
import (
"fmt"
"os"
"path"
"path/filepath"
"regexp"
"github.com/apex/log"
)
const (
globPattern = "ecs-*.toml"
configFormat = "ecs-%s.toml"
)
var (
rePattern = regexp.MustCompile(`^ecs-(\w+)\.toml$`)
)
func findInfraDir() (string, error) {
var infra string
dir, err := os.Getwd()
if err != nil {
return "", err
}
for dir != path.Dir(dir) {
infra = path.Join(dir, "infra")
if stat, err := os.Stat(infra); err == nil {
if stat.IsDir() { // directory infra found
if files, _ := filepath.Glob(path.Join(infra, globPattern)); len(files) > 0 {
|
}
}
dir = path.Dir(dir)
}
	return "", fmt.Errorf("can't find directory with config files")
}
func findEnvironments() ([]string, error) {
var envs []string
infra, err := findInfraDir()
if err != nil {
return []string{}, err
}
if files, _ := filepath.Glob(path.Join(infra, globPattern)); len(files) > 0 {
for _, fp := range files {
file := filepath.Base(fp)
match := rePattern.FindStringSubmatch(file)
if match != nil {
envs = append(envs, match[1])
}
}
return envs, nil
}
return []string{}, fmt.Errorf("can't find any environment")
}
func findConfigByEnvironment(environment string) (string, error) {
var filename string
infra, err := findInfraDir()
if err != nil {
return "", err
}
filename = path.Join(infra, fmt.Sprintf(configFormat, environment))
if _, err := os.Stat(filename); err == nil {
log.WithFields(log.Fields{
"config": filename,
}).Debug("Found config!")
return filename, nil
}
return "", fmt.Errorf("'infra/ecs-%s.toml' doesn't exist", environment)
}
|
return infra, nil
}
|
client.py
|
# imports - standard imports
import sys
import os.path as osp
import random
import collections
from json import JSONDecodeError
# imports - third-party imports
from ccapi.util.gevent import patch
patch()
import requests
from requests.exceptions import HTTPError
# from requests_cache.core import CachedSession
import grequests as greq
from grequests import AsyncRequest
# imports - module imports
from ccapi.util.environ import getenv
from ccapi.api.helper import (
_build_model_urls,
_user_response_to_user,
_model_content_to_model,
_model_version_response_to_boolean_model,
_merge_metadata_to_model
)
from ccapi.model.model.base import Model, _ACCEPTED_MODEL_DOMAIN_TYPES
from ccapi.model.user import User
from ccapi.core.querylist import QueryList
from ccapi.core.config import Configuration
from ccapi.constant import (
PATH,
AUTHENTICATION_HEADER,
_AUTHENTICATION_ERROR_STRING
)
from ccapi._compat import (
string_types,
iteritems,
iterkeys,
itervalues,
urlencode
)
from ccapi.util.array import (
sequencify,
squash,
find
)
from ccapi.util._dict import merge_dict
from ccapi.exception import (
TypeError,
AuthenticationError
)
from ccapi.log import get_logger
logger = get_logger()
config = Configuration()
from ccapi.model.model.metabolic import (
ConstraintBasedModel,
Metabolite,
Reaction
)
class Client:
"""
    The :class:`Client` class provides convenient access to the Cell
Collective API. Instances of this class are a gateway to interacting
with Cell Collective's API through the ccapi.
:param base_url: A base URL to use.
:param proxies: A dictionary/list of proxies to use. If a list is passed,
each element in the list should be a dictionary of the format
``{ protocol: ip }``.
:param test: Attempt to test the connection to the base url.
Usage::
>>> import ccapi
>>> client = ccapi.Client()
>>> client
<Client url='https://cellcollective.org'>
"""
def __init__(self,
base_url = None,
proxies = [ ],
test = True,
cache_timeout = None
):
self.base_url = base_url or config.url
self._auth_token = None
# if cache_timeout:
# self._session = CachedSession(
# cache_name = osp.join(PATH["CACHE"], "requests"),
# expire_after = cache_timeout
# )
# else:
self._session = requests.Session()
if proxies and \
not isinstance(proxies, (collections.Mapping, list, tuple)):
            raise TypeError((
                "proxies %s are not of a valid type. You must pass "
                "either a dictionary or a list of dictionaries of the "
                "following format { protocol: ip }.") % proxies)
if isinstance(proxies, collections.Mapping):
proxies = [proxies]
self._proxies = proxies
if test:
self.ping()
def __repr__(self):
repr_ = "<Client url='%s'>" % (self.base_url)
return repr_
def __eq__(self, other):
equals = False
if isinstance(other, Client):
if self.base_url == other.base_url:
if not self.authenticated and not other.authenticated:
equals = True
else:
if self.authenticated:
if other.authenticated:
if self._auth_token == other._auth_token:
equals = True
return equals
@property
def version(self):
"""
Version of the Build Service.
Usage::
>>> import ccapi
>>> client = ccapi.Client()
>>> client.version
'2.6.2'
"""
response = self.request("GET", "api/ping")
if response.ok:
content = response.json()
version = content.get("version")
return version
else:
response.raise_for_status()
def _build_url(self, *args, **kwargs):
params = kwargs.pop("params", None)
prefix = kwargs.get("prefix", True)
parts = [ ]
if prefix:
parts.append(self.base_url)
url = "/".join(map(str, sequencify(parts) + sequencify(args)))
if params:
encoded = urlencode(params)
url += "?%s" % encoded
return url
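    # Sketch of what _build_url produces for a client whose base_url is, say,
    # "https://cellcollective.org" (values purely illustrative):
    #
    #     client._build_url("api", "model", params = { "cards": 5 })
    #     # -> "https://cellcollective.org/api/model?cards=5"
    #     client._build_url("_api/login", prefix = False)
    #     # -> "_api/login"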
def request(self, method, url, *args, **kwargs):
raise_error = kwargs.pop("raise_error", True)
token = kwargs.pop("token", None)
headers = kwargs.pop("headers", { })
proxies = kwargs.pop("proxies", self._proxies)
data = kwargs.get("params", kwargs.get("data"))
prefix = kwargs.get("prefix", True)
user_agent = kwargs.get("user_agent", config.user_agent)
async_ = kwargs.pop("async_", False)
headers.update({
"User-Agent": user_agent
})
if token:
headers.update({
AUTHENTICATION_HEADER: token
})
else:
if self._auth_token:
headers.update({
AUTHENTICATION_HEADER: self._auth_token
})
if proxies:
proxies = random.choice(proxies)
logger.info("Using proxy %s to dispatch request." % proxies)
url = self._build_url(url, prefix = prefix)
logger.info("Dispatching a %s request to URL: %s with Arguments - %s" \
% (method, url, kwargs))
if async_:
response = AsyncRequest(method, url, session = self._session,
headers = headers, proxies = proxies, *args, **kwargs)
else:
response = self._session.request(method, url,
headers = headers, proxies = proxies, *args, **kwargs)
if not response.ok and raise_error:
if response.text:
                logger.error("Error received from the server: %s" % response.text)
response.raise_for_status()
return response
def post(self, url, *args, **kwargs):
"""
Dispatch a POST request to the server.
:param url: URL part (does not include the base URL).
:param args: Arguments provided to ``client.request``
:param kwargs: Keyword Arguments provided to ``client.request``
Usage::
>>> import ccapi
>>> client = ccapi.Client()
>>> response = client.post("api/module/12345/report")
>>> response.content
b'"First Name","Last Name","Email","Institution","Last Updated Date"\n'
"""
response = self.request("POST", url, *args, **kwargs)
return response
def put(self, url, *args, **kwargs):
"""
Dispatch a PUT request to the server.
"""
response = self.request("PUT", url, *args, **kwargs)
return response
def ping(self, *args, **kwargs):
"""
Check if the URL is alive.
:param args: Arguments provided to ``client.request``
:param kwargs: Keyword Arguments provided to ``client.request``
Usage::
>>> import ccapi
>>> client = ccapi.Client()
>>> client.ping()
'pong'
"""
response = self.request("GET", "api/ping", *args, **kwargs)
try:
content = response.json()
if content.get("data") == "pong":
return "pong"
else:
raise ValueError("Unable to ping to URL %s." % self.base_url)
except JSONDecodeError:
raise ResponseError("Unable to decode JSON.")
def auth(self, *args, **kwargs):
|
def logout(self):
"""
Logout client.
"""
self._auth_token = None
@property
def authenticated(self):
_authenticated = bool(self._auth_token)
return _authenticated
def raise_for_authentication(self):
"""
Raise AuthenticationError in case the client hasn't been authenticated.
"""
if not self.authenticated:
raise AuthenticationError("Client is not authenticated.")
def me(self, *args, **kwargs):
"""
Get the user profile of the authenticated client.
Usage::
>>> import ccapi
>>> client = ccapi.Client()
>>> client.auth(email = "[email protected]", password = "test")
>>> client.me()
<User id=10887 name='Test Test'>
"""
response = self.request("GET", "_api/user/getProfile", *args, **kwargs)
content = response.json()
user = _user_response_to_user(content, client = self)
return user
def get(self, resource, *args, **kwargs):
"""
Get resources.
:param resource: Resource name.
"""
_resource = resource.lower()
resources = [ ]
id_ = kwargs.get("id")
query = kwargs.get("query")
raw = kwargs.get("raw", False)
filters = kwargs.get("filters", { })
domain = filters.get("domain", "research")
category = filters.get("category", "published")
modelTypes = filters.get("modelTypes", ["boolean", "metabolic"])
size = kwargs.get("size", config.max_api_resource_fetch)
since = kwargs.get("since", 1)
since = since if since > 0 else 1
orderBy = "recent"
if id_:
if isinstance(id_, string_types) and id_.isdigit():
id_ = int(id_)
id_ = sequencify(id_)
if _resource == "model":
url = self._build_url("api/model/cards/%s" % domain, prefix = False)
params = [
("modelTypes", "&".join(modelTypes)),
("orderBy", orderBy),
("category", category),
("cards", size)
]
if query:
                params = params + [
                    ("search", "species"),
                    ("search", "knowledge"),
                    ("name", query)
                ]
response = self.request("GET", url, params = params)
content = response.json()
if id_:
urls = dict()
version = kwargs.get("version")
hash_ = kwargs.get("hash")
if isinstance(hash_, str):
if not len(id_) == 1:
                        raise ValueError((
                            "Hash provided cannot be a string. "
                            "To provide multiple hashes, pass a dict of the "
                            "format { id: hash }"
))
models = dict()
for i in id_:
model = find(content, lambda x: (x["model"]["id"] if "model" in x else x["id"]) == i)
if not model:
raise ValueError("Model with ID %s not found." % i)
else:
models[i] = dict({
"metadata": model,
"versions": dict()
})
versions = [ ]
if not version:
versions = list(iterkeys(model["model"]["modelVersionMap"])) if "model" in model \
else [model["version"] for model in model["versions"]]
hash_def = None
if not hash_:
hash_def = model.get("hash", None)
urls.update(_build_model_urls(self,
id_ = i, version = version if version else versions,
hash_ = hash_ if hash_ else hash_def
))
arequests = (self.request("GET", url, async_ = True) for url in itervalues(urls))
responses = greq.imap(arequests)
keys = list(iterkeys(urls))
for i, response in enumerate(responses):
_id, _version = list(map(int, keys[i].split("/")))
if response.ok:
json = response.json()
models[_id]["versions"].update({
_version: json
})
else:
logger.warn("Unable to fetch model %s with version %s." % (_id, _version))
user_ids = [ ]
for id_, model in iteritems(models):
metadata = model["metadata"]
if "model" in metadata:
user_id = metadata["model"]["userId"]
user_ids.append(user_id)
for version_id, version_data in iteritems(model["versions"]):
version_data = next(iter(itervalues(version_data)))
for _, share_data in iteritems(version_data["shareMap"]):
user_id = share_data["userId"]
user_ids.append(user_id)
if metadata["uploadMap"]:
for upload_id, upload_data in iteritems(metadata["uploadMap"]):
user_id = upload_data["userId"]
user_ids.append(user_id)
else:
user_id = metadata["_createdBy"]
user_ids.append(user_id)
user_ids = set(user_ids)
users = QueryList(sequencify(self.get("user", id = user_ids)))
resources = QueryList([
_model_content_to_model(model,
users = users, client = self) \
for _, model in iteritems(models)
])
else:
if filters:
if "user" in filters:
user = filters["user"]
if isinstance(user, int):
user = self.get("user", id = user)
if not isinstance(user, User):
                        raise TypeError("Expected type for user is User "
                            "or ID, type %s found." % type(user))
content = list(filter(lambda x: x["model"]["userId"] == user.id, content))
if "domain" in filters:
domain = filters["domain"]
if domain not in _ACCEPTED_MODEL_DOMAIN_TYPES:
raise TypeError("Not a valid domain type: %s" % domain)
else:
content = list(filter(lambda x: (x["model"]["type"] if "model" in x else x["domainType"]) == domain, content))
from_, to = since - 1, min(len(content), size)
content = content[from_ : from_ + to]
ids = [(data["model"]["id"] if "model" in data else data["id"]) for data in content]
resources = self.get("model", id = ids, size = size)
elif _resource == "user":
if not id_:
raise ValueError("id required.")
response = self.request("GET", "_api/user/lookupUsers",
params = [("id", i) for i in id_]
)
content = response.json()
resources = QueryList([
_user_response_to_user(
merge_dict({ "id": user_id }, user_data),
client = self
) for user_id, user_data in iteritems(content)
])
return squash(resources)
def read(self, filename, type = None, save = False):
"""
Read a model file.
:param filename: Name of the file locally present to read a model file.
:param save: Save model after importing.
"""
type_ = type or config.model_type["value"]
model = Model(client = self)
# HACK: remove default version provided.
model.versions.pop()
if type_ == "boolean":
files = dict({ "file": (filename, open(filename, "rb")) })
response = self.post("api/model/import", files = files)
content = response.json()
boolean, meta = _model_version_response_to_boolean_model(content,
client = self)
model = _merge_metadata_to_model(model, meta)
model.add_version(boolean)
elif type_ == "metabolic":
data = dict(type = type_)
files = [("file", open(filename, "rb"))]
response = self.post("api/model/import", data = data,
files = files)
content = response.json()
data = content["data"]
model = Model(client = self)
# HACK: remove default version provided.
model.versions.pop()
for file_data in data:
print(file_data)
model_data = file_data["data"]
model.id = model_data["id"]
model.name = model_data["name"]
for version in model_data["versions"]:
if model_data["modelType"] == "metabolic":
metabolic = ConstraintBasedModel(
id = model.id, version = version["id"], client = self)
for metabolite in version["metabolites"]:
m = Metabolite(
id = metabolite["id"],
name = metabolite["name"],
compartment = metabolite["compartment"],
formula = metabolite["formula"],
charge = metabolite["charge"],
client = self
)
metabolic.add_metabolite(m)
for reaction in version["reactions"]:
r = Reaction(
id = reaction["id"],
name = reaction["name"],
lower_bound = reaction["lowerBound"],
upper_bound = reaction["upperBound"],
client = self
)
metabolic.add_reaction(r)
model.add_version(metabolic)
else:
raise TypeError("Unknown type %s." % type_)
if save:
model.save()
return model
def search(self, resource, query, *args, **kwargs):
"""
Search a resource.
:param resource: Name of the resource.
:param query: Search a query string.
"""
return self.get(resource, query = query, *args, **kwargs)
|
"""
Authenticate client.
Usage::
>>> import ccapi
>>> client = ccapi.Client()
>>> client.auth(
email = "[email protected]",
password = "test"
)
>>> client.authenticated
True
>>> client.auth(token = "<YOUR_AUTH_TOKEN>")
>>> client.authenticated
True
"""
token = kwargs.get("token", None)
if not token:
email = getenv("AUTH_EMAIL", kwargs.get("email", None))
password = getenv("AUTH_PASSWORD", kwargs.get("password", None))
if not email:
raise ValueError("email not provided.")
if not password:
raise ValueError("password not provided.")
data = dict(username = email, password = password)
response = self.post("_api/login", data = data)
auth_token = response.headers.get(AUTHENTICATION_HEADER)
if auth_token:
self._auth_token = auth_token
else:
raise AuthenticationError(_AUTHENTICATION_ERROR_STRING)
else:
try:
self.me(token = token)
self._auth_token = token
except HTTPError:
raise AuthenticationError(_AUTHENTICATION_ERROR_STRING)
|
generate-seeds.py
|
#!/usr/bin/env python3
# Copyright (c) 2014-2021 The Beans Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Script to generate list of seed nodes for chainparams.cpp.
This script expects two text files in the directory that is passed as an
argument:
nodes_main.txt
nodes_test.txt
These files must consist of lines in the format
<ip>:<port>
[<ipv6>]:<port>
<onion>.onion:<port>
<i2p>.b32.i2p:<port>
The output will be two data structures with the peers in binary format:
static const uint8_t chainparams_seed_{main,test}[]={
...
}
These should be pasted into `src/chainparamsseeds.h`.
'''
from base64 import b32decode
from enum import Enum
import struct
import sys
import os
import re
class
|
(Enum):
IPV4 = 1
IPV6 = 2
TORV2 = 3
TORV3 = 4
I2P = 5
CJDNS = 6
def name_to_bip155(addr):
'''Convert address string to BIP155 (networkID, addr) tuple.'''
if addr.endswith('.onion'):
vchAddr = b32decode(addr[0:-6], True)
if len(vchAddr) == 10:
return (BIP155Network.TORV2, vchAddr)
elif len(vchAddr) == 35:
assert(vchAddr[34] == 3)
return (BIP155Network.TORV3, vchAddr[:32])
else:
raise ValueError('Invalid onion %s' % vchAddr)
elif addr.endswith('.b32.i2p'):
vchAddr = b32decode(addr[0:-8] + '====', True)
if len(vchAddr) == 32:
return (BIP155Network.I2P, vchAddr)
else:
raise ValueError(f'Invalid I2P {vchAddr}')
elif '.' in addr: # IPv4
return (BIP155Network.IPV4, bytes((int(x) for x in addr.split('.'))))
elif ':' in addr: # IPv6
sub = [[], []] # prefix, suffix
x = 0
addr = addr.split(':')
for i,comp in enumerate(addr):
if comp == '':
if i == 0 or i == (len(addr)-1): # skip empty component at beginning or end
continue
x += 1 # :: skips to suffix
assert(x < 2)
else: # two bytes per component
val = int(comp, 16)
sub[x].append(val >> 8)
sub[x].append(val & 0xff)
nullbytes = 16 - len(sub[0]) - len(sub[1])
assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0))
return (BIP155Network.IPV6, bytes(sub[0] + ([0] * nullbytes) + sub[1]))
else:
raise ValueError('Could not parse address %s' % addr)
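# For reference, a couple of expected conversions (IPv4/IPv6 only; onion and
# I2P inputs need valid base32 payloads and are omitted here):
#
#     name_to_bip155('1.2.3.4') == (BIP155Network.IPV4, bytes([1, 2, 3, 4]))
#     name_to_bip155('::1')     == (BIP155Network.IPV6, bytes(15 * [0] + [1]))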
def parse_spec(s):
'''Convert endpoint string to BIP155 (networkID, addr, port) tuple.'''
match = re.match(r'\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
if match: # ipv6
host = match.group(1)
port = match.group(2)
elif s.count(':') > 1: # ipv6, no port
host = s
port = ''
else:
(host,_,port) = s.partition(':')
if not port:
port = 0
else:
port = int(port)
host = name_to_bip155(host)
return host + (port, )
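# Example endpoint specs and the tuples they parse to (the port defaults to 0
# when absent):
#
#     parse_spec('1.2.3.4:8333') == (BIP155Network.IPV4, bytes([1, 2, 3, 4]), 8333)
#     parse_spec('[::1]:8333')   == (BIP155Network.IPV6, bytes(15 * [0] + [1]), 8333)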
def ser_compact_size(l):
r = b""
if l < 253:
r = struct.pack("B", l)
elif l < 0x10000:
r = struct.pack("<BH", 253, l)
elif l < 0x100000000:
r = struct.pack("<BI", 254, l)
else:
r = struct.pack("<BQ", 255, l)
return r
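# Threshold behaviour of the compact-size encoding above, for reference:
#
#     ser_compact_size(10)      == b'\x0a'
#     ser_compact_size(253)     == b'\xfd\xfd\x00'
#     ser_compact_size(0x10000) == b'\xfe\x00\x00\x01\x00'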
def bip155_serialize(spec):
'''
Serialize (networkID, addr, port) tuple to BIP155 binary format.
'''
r = b""
r += struct.pack('B', spec[0].value)
r += ser_compact_size(len(spec[1]))
r += spec[1]
r += struct.pack('>H', spec[2])
return r
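# Putting the pieces together: network id byte, compact-size length, raw address
# bytes, then the port in big-endian. For an IPv4 endpoint:
#
#     bip155_serialize((BIP155Network.IPV4, bytes([1, 2, 3, 4]), 8333))
#     # -> b'\x01\x04\x01\x02\x03\x04\x20\x8d'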
def process_nodes(g, f, structname):
g.write('static const uint8_t %s[] = {\n' % structname)
for line in f:
comment = line.find('#')
if comment != -1:
line = line[0:comment]
line = line.strip()
if not line:
continue
spec = parse_spec(line)
blob = bip155_serialize(spec)
hoststr = ','.join(('0x%02x' % b) for b in blob)
g.write(f' {hoststr},\n')
g.write('};\n')
def main():
if len(sys.argv)<2:
print(('Usage: %s <path_to_nodes_txt>' % sys.argv[0]), file=sys.stderr)
sys.exit(1)
g = sys.stdout
indir = sys.argv[1]
g.write('#ifndef BEANS_CHAINPARAMSSEEDS_H\n')
g.write('#define BEANS_CHAINPARAMSSEEDS_H\n')
g.write('/**\n')
g.write(' * List of fixed seed nodes for the beans network\n')
g.write(' * AUTOGENERATED by contrib/seeds/generate-seeds.py\n')
g.write(' *\n')
g.write(' * Each line contains a BIP155 serialized (networkID, addr, port) tuple.\n')
g.write(' */\n')
with open(os.path.join(indir,'nodes_main.txt'), 'r', encoding="utf8") as f:
process_nodes(g, f, 'chainparams_seed_main')
g.write('\n')
with open(os.path.join(indir,'nodes_test.txt'), 'r', encoding="utf8") as f:
process_nodes(g, f, 'chainparams_seed_test')
g.write('#endif // BEANS_CHAINPARAMSSEEDS_H\n')
if __name__ == '__main__':
main()
|
BIP155Network
|
addrs.go
|
package libp2p
import (
"context"
"github.com/libp2p/go-libp2p-core/peer"
"github.com/pkg/errors"
"github.com/libp2p/test-plans/dht/utils"
"github.com/testground/sdk-go/sync"
)
func ShareAddresses(ctx context.Context, ri *utils.RunInfo, nodeInfo *NodeInfo) (map[peer.ID]*NodeInfo, error)
|
type NodeInfo struct {
Seq int // sequence number within the test
GroupSeq int // sequence number within the test group
Group string
Addrs *peer.AddrInfo
}
// PeerAttribTopic represents a subtree under the test run's sync tree where peers
// participating in this distributed test advertise their attributes.
var PeerAttribTopic = sync.NewTopic("attribs", &NodeInfo{})
|
{
otherNodes := make(map[peer.ID]*NodeInfo)
subCtx, cancel := context.WithCancel(ctx)
defer cancel()
attribCh := make(chan *NodeInfo)
if _, _, err := ri.Client.PublishSubscribe(subCtx, PeerAttribTopic, nodeInfo, attribCh); err != nil {
return nil, errors.Wrap(err, "peer attrib publish/subscribe failure")
}
for i := 0; i < ri.RunEnv.TestInstanceCount; i++ {
select {
case info := <-attribCh:
if info.Seq == nodeInfo.Seq {
continue
}
otherNodes[info.Addrs.ID] = info
case <-ctx.Done():
return nil, ctx.Err()
}
}
return otherNodes, nil
}
|
plugin.ts
|
import * as BabelCore from '@babel/core';
import * as BabelTypes from '@babel/types';
import { parseCommentHints, CommentHint } from './comments';
import Extractors, {
EXTRACTORS_PRIORITIES,
ExtractionError,
} from './extractors';
import { computeDerivedKeys, ExtractedKey, TranslationKey } from './keys';
import { Config, parseConfig } from './config';
import exportTranslationKeys, {
ExporterCache,
createExporterCache,
} from './exporters';
import { PLUGIN_NAME } from './constants';
import extractWithTranslationHOC from './extractors/withTranslationHOC';
export interface VisitorState {
// Options inherited from Babel.
file: any; // eslint-disable-line @typescript-eslint/no-explicit-any
opts: Partial<Config>;
// This app state.
I18NextExtract: I18NextExtractState;
}
interface I18NextExtractState {
extractedKeys: ExtractedKey[];
commentHints: CommentHint[];
config: Config;
exporterCache: ExporterCache;
}
/**
* Handle the extraction.
*
 * If an ExtractionError occurs in the callback, a useful error
 * message is displayed and extraction continues.
*
* @param path Current node path.
* @param state Current visitor state.
* @param callback Function to call that may throw ExtractionError.
*/
function
|
<T>(
path: BabelCore.NodePath,
state: VisitorState,
callback: (collect: (keys: ExtractedKey[]) => void) => T,
): T | undefined {
const filename = (state.file && state.file.opts.filename) || '???';
const lineNumber = (path.node.loc && path.node.loc.start.line) || '???';
const extractState = state.I18NextExtract;
const collect = (newKeysCandidates: ExtractedKey[]): void => {
const currentKeys = extractState.extractedKeys;
const newKeys = Array<ExtractedKey>();
for (const newKeyCandidate of newKeysCandidates) {
const conflictingKeyIndex = currentKeys.findIndex(extractedKey =>
extractedKey.sourceNodes.some(extractedNode =>
newKeyCandidate.sourceNodes.includes(extractedNode),
),
);
if (conflictingKeyIndex !== -1) {
const conflictingKey = currentKeys[conflictingKeyIndex];
const conflictingKeyPriority = -EXTRACTORS_PRIORITIES.findIndex(
v => v === conflictingKey.extractorName,
);
const newKeyPriority = -EXTRACTORS_PRIORITIES.findIndex(
v => v === newKeyCandidate.extractorName,
);
if (newKeyPriority <= conflictingKeyPriority) {
// Existing key priority is higher than the extracted key priority.
// Skip.
continue;
}
// Remove the conflicting key from the extracted keys
currentKeys.splice(conflictingKeyIndex, 1);
}
newKeys.push(newKeyCandidate);
}
currentKeys.push(...newKeys);
};
try {
return callback(collect);
} catch (err) {
if (!(err instanceof ExtractionError)) {
throw err;
}
// eslint-disable-next-line no-console
console.warn(
`${PLUGIN_NAME}: Extraction error in ${filename} at line ` +
`${lineNumber}. ${err.message}`,
);
}
}
const Visitor: BabelCore.Visitor<VisitorState> = {
CallExpression(path, state: VisitorState) {
const extractState = this.I18NextExtract;
handleExtraction(path, state, collect => {
collect(
Extractors.extractUseTranslationHook(
path,
extractState.config,
extractState.commentHints,
),
);
collect(
Extractors.extractI18nextInstance(
path,
extractState.config,
extractState.commentHints,
),
);
collect(
Extractors.extractTFunction(
path,
extractState.config,
extractState.commentHints,
),
);
});
},
JSXElement(path, state: VisitorState) {
const extractState = this.I18NextExtract;
handleExtraction(path, state, collect => {
collect(
Extractors.extractTranslationRenderProp(
path,
extractState.config,
extractState.commentHints,
),
);
collect(
Extractors.extractTransComponent(
path,
extractState.config,
extractState.commentHints,
),
);
});
},
ClassDeclaration(path, state: VisitorState) {
const extractState = this.I18NextExtract;
handleExtraction(path, state, collect => {
collect(
extractWithTranslationHOC(
path,
extractState.config,
extractState.commentHints,
),
);
});
},
Function(path, state: VisitorState) {
const extractState = this.I18NextExtract;
handleExtraction(path, state, collect => {
collect(
extractWithTranslationHOC(
path,
extractState.config,
extractState.commentHints,
),
);
});
},
};
export default function(
api: BabelCore.ConfigAPI,
): BabelCore.PluginObj<VisitorState> {
api.assertVersion(7);
// This is a cache for the exporter to keep track of the translation files.
// It must remain global and persist across transpiled files.
const exporterCache = createExporterCache();
return {
pre() {
this.I18NextExtract = {
config: parseConfig(this.opts),
extractedKeys: [],
commentHints: [],
exporterCache,
};
},
post() {
const extractState = this.I18NextExtract;
if (extractState.extractedKeys.length === 0) return;
for (const locale of extractState.config.locales) {
const derivedKeys = extractState.extractedKeys.reduce(
(accumulator, k) => [
...accumulator,
...computeDerivedKeys(k, locale, extractState.config),
],
Array<TranslationKey>(),
);
exportTranslationKeys(
derivedKeys,
locale,
extractState.config,
extractState.exporterCache,
);
}
},
visitor: {
Program(path, state: VisitorState) {
// FIXME can't put this in Visitor because `path.traverse()` on a
// Program node doesn't call the visitor for Program node.
if (BabelTypes.isFile(path.container)) {
this.I18NextExtract.commentHints = parseCommentHints(
path.container.comments,
);
}
path.traverse(Visitor, state);
},
},
};
}
|
handleExtraction
|
on_enter.rs
|
//! Handles the `Enter` key press. At the moment, this only continues
//! comments, but should handle indent some time in the future as well.
use base_db::{FilePosition, SourceDatabase};
use ide_db::RootDatabase;
use syntax::{
ast::{self, AstToken},
AstNode, SmolStr, SourceFile,
SyntaxKind::*,
SyntaxToken, TextRange, TextSize, TokenAtOffset,
};
use test_utils::mark;
use text_edit::TextEdit;
// Feature: On Enter
//
// rust-analyzer can override kbd:[Enter] key to make it smarter:
//
// - kbd:[Enter] inside triple-slash comments automatically inserts `///`
// - kbd:[Enter] in the middle or after a trailing space in `//` inserts `//`
//
// This action needs to be assigned to shortcut explicitly.
//
// VS Code::
//
// Add the following to `keybindings.json`:
// [source,json]
// ----
// {
// "key": "Enter",
// "command": "rust-analyzer.onEnter",
// "when": "editorTextFocus && !suggestWidgetVisible && editorLangId == rust"
// }
// ----
pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<TextEdit> {
let parse = db.parse(position.file_id);
let file = parse.tree();
let comment = file
.syntax()
.token_at_offset(position.offset)
.left_biased()
.and_then(ast::Comment::cast)?;
if comment.kind().shape.is_block() {
return None;
}
let prefix = comment.prefix();
let comment_range = comment.syntax().text_range();
if position.offset < comment_range.start() + TextSize::of(prefix) {
return None;
}
let mut remove_last_space = false;
// Continuing single-line non-doc comments (like this one :) ) is annoying
if prefix == "//" && comment_range.end() == position.offset {
if comment.text().ends_with(' ') {
mark::hit!(continues_end_of_line_comment_with_space);
remove_last_space = true;
} else if !followed_by_comment(&comment) {
return None;
}
}
let indent = node_indent(&file, comment.syntax())?;
let inserted = format!("\n{}{} $0", indent, prefix);
let delete = if remove_last_space {
TextRange::new(position.offset - TextSize::of(' '), position.offset)
} else {
TextRange::empty(position.offset)
};
let edit = TextEdit::replace(delete, inserted);
Some(edit)
}
fn followed_by_comment(comment: &ast::Comment) -> bool {
let ws = match comment.syntax().next_token().and_then(ast::Whitespace::cast) {
Some(it) => it,
None => return false,
};
if ws.spans_multiple_lines() {
return false;
}
ws.syntax().next_token().and_then(ast::Comment::cast).is_some()
}
fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option<SmolStr> {
let ws = match file.syntax().token_at_offset(token.text_range().start()) {
TokenAtOffset::Between(l, r) => {
assert!(r == *token);
l
}
TokenAtOffset::Single(n) => {
assert!(n == *token);
return Some("".into());
}
TokenAtOffset::None => unreachable!(),
};
if ws.kind() != WHITESPACE {
return None;
}
let text = ws.text();
let pos = text.rfind('\n').map(|it| it + 1).unwrap_or(0);
Some(text[pos..].into())
}
#[cfg(test)]
mod tests {
use stdx::trim_indent;
use test_utils::{assert_eq_text, mark};
use crate::mock_analysis::analysis_and_position;
fn apply_on_enter(before: &str) -> Option<String> {
let (analysis, position) = analysis_and_position(&before);
let result = analysis.on_enter(position).unwrap()?;
let mut actual = analysis.file_text(position.file_id).unwrap().to_string();
result.apply(&mut actual);
Some(actual)
}
fn do_check(ra_fixture_before: &str, ra_fixture_after: &str) {
let ra_fixture_after = &trim_indent(ra_fixture_after);
let actual = apply_on_enter(ra_fixture_before).unwrap();
assert_eq_text!(ra_fixture_after, &actual);
}
fn do_check_noop(ra_fixture_text: &str) {
assert!(apply_on_enter(ra_fixture_text).is_none())
}
#[test]
fn continues_doc_comment() {
do_check(
r"
/// Some docs<|>
fn foo() {
}
",
r"
/// Some docs
/// $0
fn foo() {
}
",
);
do_check(
r"
impl S {
/// Some<|> docs.
fn foo() {}
}
",
r"
impl S {
/// Some
/// $0 docs.
fn foo() {}
}
",
);
do_check(
r"
///<|> Some docs
fn foo() {
}
",
r"
///
/// $0 Some docs
fn foo() {
}
",
);
}
#[test]
fn does_not_continue_before_doc_comment() {
do_check_noop(r"<|>//! docz");
}
#[test]
fn continues_code_comment_in_the_middle_of_line() {
do_check(
r"
fn main() {
// Fix<|> me
let x = 1 + 1;
}
",
r"
fn main() {
// Fix
// $0 me
let x = 1 + 1;
}
",
);
}
#[test]
fn continues_code_comment_in_the_middle_several_lines()
|
#[test]
fn does_not_continue_end_of_line_comment() {
do_check_noop(
r"
fn main() {
// Fix me<|>
let x = 1 + 1;
}
",
);
}
#[test]
fn continues_end_of_line_comment_with_space() {
mark::check!(continues_end_of_line_comment_with_space);
do_check(
r#"
fn main() {
// Fix me <|>
let x = 1 + 1;
}
"#,
r#"
fn main() {
// Fix me
// $0
let x = 1 + 1;
}
"#,
);
}
}
|
{
do_check(
r"
fn main() {
// Fix<|>
// me
let x = 1 + 1;
}
",
r"
fn main() {
// Fix
// $0
// me
let x = 1 + 1;
}
",
);
}
|
superscripts.rs
|
// Copyright 2020 WHTCORPS INC
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
use std::fmt::{
Display,
Formatter,
Write,
};
use isolated_namespace::IsolatedNamespace;
#[macro_export]
macro_rules! ns_keyword {
($ns: expr, $name: expr) => {{
$crate::Keyword::namespaced($ns, $name)
}}
}
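// A minimal usage sketch for `ns_keyword!` (illustrative only): the macro simply
// forwards its two arguments to `Keyword::namespaced`, so both lines below build
// the same value.
//
//     let a = ns_keyword!("foo", "bar");
//     let b = Keyword::namespaced("foo", "bar");
//     assert_eq!(a, b);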
#[derive(Clone,Debug,Eq,Hash,Ord,PartialOrd,PartialEq)]
pub struct PlainSymbol(pub String);
#[derive(Clone,Debug,Eq,Hash,Ord,PartialOrd,PartialEq)]
pub struct NamespacedSymbol(IsolatedNamespace);
/// ```rust
/// # use edbn::superscripts::Keyword;
/// let bar = Keyword::plain("bar"); // :bar
/// let foo_bar = Keyword::namespaced("foo", "bar"); // :foo/bar
/// assert_eq!("bar", bar.name());
/// assert_eq!(None, bar.namespace());
/// assert_eq!("bar", foo_bar.name());
/// assert_eq!(Some("foo"), foo_bar.namespace());
/// ```
#[derive(Clone,Debug,Eq,Hash,Ord,PartialOrd,PartialEq)]
#[cfg_attr(feature = "serde_support", derive(Serialize, Deserialize))]
pub struct Keyword(IsolatedNamespace);
impl PlainSymbol {
pub fn plain<T>(name: T) -> Self where T: Into<String> {
let n = name.into();
assert!(!n.is_empty(), "Symbols cannot be unnamed.");
PlainSymbol(n)
}
pub fn name(&self) -> &str {
if self.is_src_symbol() || self.is_var_symbol() {
&self.0[1..]
} else
|
}
#[inline]
pub fn is_var_symbol(&self) -> bool {
self.0.starts_with('?')
}
#[inline]
pub fn is_src_symbol(&self) -> bool {
self.0.starts_with('$')
}
}
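// Sketch of how `PlainSymbol::name` interacts with the sigil checks above
// (illustrative values, not drawn from a test suite): a leading '?' or '$'
// is stripped, anything else is returned unchanged.
//
//     assert_eq!("x",   PlainSymbol::plain("?x").name());
//     assert_eq!("src", PlainSymbol::plain("$src").name());
//     assert_eq!("foo", PlainSymbol::plain("foo").name());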
impl NamespacedSymbol {
pub fn namespaced<N, T>(namespace: N, name: T) -> Self where N: AsRef<str>, T: AsRef<str> {
let r = namespace.as_ref();
assert!(!r.is_empty(), "Namespaced symbols cannot have an empty non-null namespace.");
NamespacedSymbol(IsolatedNamespace::namespaced(r, name))
}
#[inline]
pub fn name(&self) -> &str {
self.0.name()
}
#[inline]
pub fn namespace(&self) -> &str {
self.0.namespace().unwrap()
}
#[inline]
pub fn components<'a>(&'a self) -> (&'a str, &'a str) {
self.0.components()
}
}
impl Keyword {
pub fn plain<T>(name: T) -> Self where T: Into<String> {
Keyword(IsolatedNamespace::plain(name))
}
}
impl Keyword {
/// Creates a new `Keyword`.
///
/// # Examples
///
/// ```rust
/// # use edbn::superscripts::Keyword;
/// let keyword = Keyword::namespaced("foo", "bar");
/// assert_eq!(keyword.to_string(), ":foo/bar");
/// ```
///
/// See also the `kw!` macro in the main `einsteindb` crate.
pub fn namespaced<N, T>(namespace: N, name: T) -> Self where N: AsRef<str>, T: AsRef<str> {
let r = namespace.as_ref();
assert!(!r.is_empty(), "Namespaced keywords cannot have an empty non-null namespace.");
Keyword(IsolatedNamespace::namespaced(r, name))
}
#[inline]
pub fn name(&self) -> &str {
self.0.name()
}
#[inline]
pub fn namespace(&self) -> Option<&str> {
self.0.namespace()
}
#[inline]
pub fn components<'a>(&'a self) -> (&'a str, &'a str) {
self.0.components()
}
/// Whether this `Keyword` should be interpreted in reverse order. For example,
/// the two following snippets are identical:
///
/// ```edbn
/// [?y :person/friend ?x]
/// [?x :person/hired ?y]
///
/// [?y :person/friend ?x]
/// [?y :person/_hired ?x]
/// ```
///
/// # Examples
///
/// ```rust
/// # use edbn::superscripts::Keyword;
/// assert!(!Keyword::namespaced("foo", "bar").is_backward());
/// assert!(Keyword::namespaced("foo", "_bar").is_backward());
/// ```
#[inline]
pub fn is_backward(&self) -> bool {
self.0.is_backward()
}
/// Whether this `Keyword` should be interpreted in forward order.
/// See `symbols::Keyword::is_backward`.
///
/// # Examples
///
/// ```rust
/// # use edbn::superscripts::Keyword;
/// assert!(Keyword::namespaced("foo", "bar").is_forward());
/// assert!(!Keyword::namespaced("foo", "_bar").is_forward());
/// ```
#[inline]
pub fn is_forward(&self) -> bool {
self.0.is_forward()
}
#[inline]
pub fn is_namespaced(&self) -> bool {
self.0.is_namespaced()
}
/// Returns a `Keyword` with the same namespace and a
/// 'backward' name. See `superscripts::Keyword::is_backward`.
///
/// Returns a forward name if passed a reversed keyword; i.e., this
/// function is its own inverse.
///
/// # Examples
///
/// ```rust
/// # use edbn::superscripts::Keyword;
/// let nsk = Keyword::namespaced("foo", "bar");
/// assert!(!nsk.is_backward());
/// assert_eq!(":foo/bar", nsk.to_string());
///
/// let reversed = nsk.to_reversed();
/// assert!(reversed.is_backward());
/// assert_eq!(":foo/_bar", reversed.to_string());
/// ```
pub fn to_reversed(&self) -> Keyword {
Keyword(self.0.to_reversed())
}
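/// Returns the forward-order form of a reversed keyword, or `None` if this
/// keyword is already forward. A short sketch, mirroring `to_reversed` above:
///
/// ```rust
/// # use edbn::superscripts::Keyword;
/// assert_eq!(None, Keyword::namespaced("foo", "bar").unreversed());
/// assert_eq!(Some(Keyword::namespaced("foo", "bar")),
///            Keyword::namespaced("foo", "_bar").unreversed());
/// ```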
pub fn unreversed(&self) -> Option<Keyword> {
if self.is_backward() {
Some(self.to_reversed())
} else {
None
}
}
}
impl Display for PlainSymbol {
/// Print the symbol in EDBN format.
///
/// # Examples
///
/// ```rust
/// # use edbn::superscripts::PlainSymbol;
/// assert_eq!("baz", PlainSymbol::plain("baz").to_string());
/// ```
fn fmt(&self, f: &mut Formatter) -> ::std::fmt::Result {
self.0.fmt(f)
}
}
|
{
&self.0
}
|
test_notifications.py
|
# Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from endorsement.test.support import SupportApiTest
class TestSupportNotifications(SupportApiTest):
@property
def reverse_id(self):
return 'endorsee_notifications'
def test_statistics(self):
self._test_good_page()
def test_bogus_user_statistics(self):
|
self._test_invalid_user()
|
|
main_old.py
|
from __future__ import unicode_literals
import os
import asyncio
import subprocess
import youtube_dl
from Python_ARQ import ARQ
from pytgcalls import GroupCall
from sys import version as pyver
from pyrogram import Client, filters
from misc import HELP_TEXT, START_TEXT, REPO_TEXT
from functions import (
transcode,
download_and_transcode_song,
convert_seconds,
time_to_seconds,
generate_cover,
generate_cover_square,
)
# TODO Make it look less messed up
is_config = os.path.exists("config.py")
if is_config:
from config import (
API_ID, API_HASH,
SUDO_CHAT_ID,
SUDOERS, ARQ_API, HEROKU
)
elif not is_config:
from sample_config import (
API_ID, API_HASH,
SUDO_CHAT_ID,
SUDOERS, ARQ_API, HEROKU
)
if HEROKU:
if is_config:
from config import SESSION_STRING
elif not is_config:
from sample_config import SESSION_STRING
queue = [] # This is where the whole song queue is stored
playing = False # Tells if something is playing or not
# Pyrogram Client
if not HEROKU:
app = Client("tgvc", api_id=API_ID, api_hash=API_HASH)
else:
app = Client(SESSION_STRING, api_id=API_ID, api_hash=API_HASH)
# Pytgcalls Client
vc = GroupCall(
client=app,
input_filename="input.raw",
play_on_repeat=True,
enable_logs_to_console=False,
)
# Arq Client
arq = ARQ(ARQ_API)
async def delete(message):
await asyncio.sleep(10)
await message.delete()
@app.on_message(filters.command("start") & filters.user(SUDOERS))
async def start(_, message):
await send(START_TEXT)
@app.on_message(filters.command("help") & filters.user(SUDOERS))
async def help(_, message):
await send(HELP_TEXT)
@app.on_message(filters.command("repo") & filters.user(SUDOERS))
async def repo(_, message):
await send(REPO_TEXT)
@app.on_message(filters.command("joinvc") & filters.user(SUDOERS))
async def joinvc(_, message):
try:
if vc.is_connected:
await send("__**Allaqachon ovozli chatdaman.**__")
return
chat_id = message.chat.id
await vc.start(chat_id)
await send("__**Ovozli chatga qo'shildim.**__")
except Exception as e:
print(str(e))
await send(str(e))
@app.on_message(filters.command("rejoinvc") & filters.user(SUDOERS))
async def rejoinvc(_, message):
try:
if vc.is_connected:
await send("__**Allaqachon ovozli chatdaman.**__")
return
chat_id = message.chat.id
await vc.reconnect()
await send("__**Ovozli chatga qo'shildim.**__")
except Exception as e:
print(str(e))
await send(str(e))
@app.on_message(filters.command("leavevc") & filters.user(SUDOERS))
async def leavevc(_, message):
if not vc.is_connected:
await send("__**Ovozli chatdan allaqachon chiqib ketganman.**__")
return
await vc.leave_current_group_call()
await vc.stop()
await send("__**Ovozli chatni tark etdim , yangilanish....**__")
os.execvp(
f"python{str(pyver.split(' ')[0])[:3]}",
[f"python{str(pyver.split(' ')[0])[:3]}", "main.py"],
)
@app.on_message(filters.command("update") & filters.user(SUDOERS))
async def update_restart(_, message):
await send(
f'```{subprocess.check_output(["git", "pull"]).decode("UTF-8")}```'
)
os.execvp(
f"python{str(pyver.split(' ')[0])[:3]}",
[f"python{str(pyver.split(' ')[0])[:3]}", "main.py"],
)
@app.on_message(filters.command("pause") & filters.user(SUDOERS))
async def pause_song(_, message):
vc.pause_playout()
await send("**To'xtatildi, davom ettirish uchun /resume buyrug'ini bering.**")
@app.on_message(filters.command("resume") & filters.chat(SUDO_CHAT_ID))
async def resume_song(_, message):
vc.resume_playout()
await send("**Davom etmoqda, to'xtatish uchun /pause buyrug'ini bering.**")
@app.on_message(filters.command("volume") & filters.user(SUDOERS))
async def volume_bot(_, message):
usage = "**Ishlatish uchun:**\n/volume [1-200] yozing"
if len(message.command) != 2:
await send(usage)
return
volume = int(message.text.split(None, 1)[1])
if (volume < 1) or (volume > 200):
await send(usage)
return
try:
await vc.set_my_volume(volume=volume)
except ValueError:
await send(usage)
return
await send(f"**Volume Set To {volume}**")
@app.on_message(filters.command("play") & filters.chat(SUDO_CHAT_ID))
async def queuer(_, message):
usage = "**Usage:**\n__**/play youtube Qo'shiq_Nomi**__"
if len(message.command) < 3:
await send(usage)
return
text = message.text.split(None, 2)[1:]
service = text[0].lower()
song_name = text[1]
requested_by = message.from_user.first_name
services = ["youtube", "deezer", "saavn"]
if service not in services:
await send(usage)
return
if len(queue) > 0:
await message.delete()
await send("__**Navbatga qo'shdim.__**")
queue.append(
{
"service": service,
"song": song_name,
"requested_by": requested_by,
}
)
await play()
return
await message.delete()
queue.append(
{
"service": service,
"song": song_name,
"requested_by": requested_by,
}
)
await play()
@app.on_message(
filters.command("skip") & filters.user(SUDOERS) & ~filters.edited
)
async def skip(_, message):
global playing
if len(queue) == 0:
await send("__**Navbat bo'm-bo'sh.**__")
return
playing = False
await send("__**Keyingisiga o'tkazildi!**__")
await play()
@app.on_message(filters.command("queue") & filters.chat(SUDO_CHAT_ID))
async def queue_list(_, message):
if len(queue) != 0:
i = 1
text = ""
for song in queue:
text += f"**{i}. Platforma:** __**{song['service']}**__ " \
+ f"| **Musiqa:** __**{song['song']}**__\n"
i += 1
m = await send(text)
await delete(message)
await m.delete()
else:
m = await send("__**Navbatda musiqa yo'q.**__")
await delete(message)
await m.delete()
# Queue handler
async def play():
global queue, playing
while not playing:
await asyncio.sleep(2)
if len(queue) != 0:
service = queue[0]["service"]
song = queue[0]["song"]
requested_by = queue[0]["requested_by"]
if service == "youtube":
playing = True
del queue[0]
try:
await ytplay(requested_by, song)
except Exception as e:
print(str(e))
await send(str(e))
playing = False
pass
elif service == "saavn":
playing = True
del queue[0]
try:
await jiosaavn(requested_by, song)
except Exception as e:
print(str(e))
await send(str(e))
playing = False
pass
elif service == "deezer":
playing = True
del queue[0]
try:
await deezer(requested_by, song)
except Exception as e:
print(str(e))
await send(str(e))
playing = False
pass
# Deezer----------------------------------------------------------------------------------------
async def deezer(requested_by, query):
global playing
m = await send(f"__**Searching for {query} on Deezer.**__")
try:
songs = await arq.deezer(query, 1)
title = songs[0].title
duration = convert_seconds(int(songs[0].duration))
thumbnail = songs[0].thumbnail
artist = songs[0].artist
url = songs[0].url
except Exception:
await m.edit("__**Found No Song Matching Your Query.**__")
playing = False
return
await m.edit("__**Generating Thumbnail.**__")
await generate_cover_square(
requested_by, title, artist, duration, thumbnail
)
await m.edit("__**Downloading And Transcoding.**__")
await download_and_transcode_song(url)
await m.delete()
caption = f"🏷 **Name:** [{title[:35]}]({url})\n⏳ **Duration:** {duration}\n" \
+ f"🎧 **Requested By:** {requested_by}\n📡 **Platform:** Deezer"
m = await app.send_photo(
chat_id=SUDO_CHAT_ID,
photo="final.png",
caption=caption,
)
os.remove("final.png")
await asyncio.sleep(int(songs[0].duration))
await m.delete()
playing = False
# Jiosaavn--------------------------------------------------------------------------------------
async def jiosaavn(requested_by, query):
global playing
m = await send(f"__**Searching for {query} on JioSaavn.**__")
try:
songs = await arq.saavn(query)
sname = songs[0].song
slink = songs[0].media_url
ssingers = songs[0].singers
sthumb = songs[0].image
sduration = songs[0].duration
sduration_converted = convert_seconds(int(sduration))
except Exception as e:
await m.edit("__**Found No Song Matching Your Query.**__")
print(str(e))
playing = False
return
await m.edit("__**Processing Thumbnail.**__")
await generate_cover_square(
requested_by, sname, ssingers, sduration_converted, sthumb
)
await m.edit("__**Downloading And Transcoding.**__")
await download_and_transcode_song(slink)
await m.delete()
caption = f"🏷 **Name:** {sname[:35]}\n⏳ **Duration:** {sduration_converted}\n" \
+ f"🎧 **Requested By:** {requested_by}\n📡 **Platform:** JioSaavn"
m = await app.send_photo(
chat_id=SUDO_CHAT_ID,
caption=caption,
photo="final.png",
)
os.remove("final.png")
await asyncio.sleep(int(sduration))
await m.delete()
playing = False
# Youtube Play-----------------------------------------------------
async def ytplay(requested_by, query):
global playing
ydl_opts = {"format": "bestaudio"}
m = await send(f"__**{query} YouTubedan izlanmoqda.**__")
try:
results = await arq.youtube(query)
link = f"https://youtube.com{results[0].url_suffix}"
title = results[0].title
thumbnail = results[0].thumbnails[0]
duration = results[0].duration
views = results[0].views
await app.update_profile(first_name=f"🔉{title} ", bio=f"{title} ijro etilmoqda")
if time_to_seconds(duration) >= 1800:
await m.edit("__**Yo'q, faqat 30 daqiqadan oshmagan musiqalar mumkin.**__")
playing = False
return
except Exception as e:
await m.edit("__**Siz izlagan musiqa topilmadi.**__")
playing = False
print(str(e))
return
await m.edit("__**1 soniya.**__")
await generate_cover(requested_by, title, views, duration, thumbnail)
await m.edit("__**yuklanmoqda ....**__")
with youtube_dl.YoutubeDL(ydl_opts) as ydl:
info_dict = ydl.extract_info(link, download=False)
audio_file = ydl.prepare_filename(info_dict)
ydl.process_info(info_dict)
await m.edit("__**1 soniya.**__")
os.rename(audio_file, "audio.webm")
transcode("audio.webm")
await m.delete()
caption = f"🏷 **Nomi:** [{title[:35]}]({link})\n⏳ **Davomiyligi:** {duration}\n" \
+ f"🎧 {requested_by} **tomonidan ijro etildi**\n📡 **Platforma:** YouTube"
await app.set_profile_photo(photo="final.png")
m = await app.send_photo(
chat_id=SUDO_CHAT_ID,
caption=caption,
photo="final.png",
)
os.remove("final.png")
await asyncio.sleep(int(time_to_seconds(duration)))
playing = False
await m.delete()
# Telegram Audio------------------------------------
@app.on_message(
filters.command("telegram") & filters.user(SUDOERS) & ~filters.edited
)
async def tgplay(_, message):
global play
|
if len(queue) != 0:
await send("__**You Can Only Play Telegram Files After The Queue Gets "
+ "Finished.**__")
return
if not message.reply_to_message:
await send("__**Reply to an audio.**__")
return
if message.reply_to_message.audio:
if int(message.reply_to_message.audio.file_size) >= 104857600:
await send("__**Bruh! Only songs within 100 MB.**__")
playing = False
return
duration = message.reply_to_message.audio.duration
if not duration:
await send("__**Only Songs With Duration Are Supported.**__")
return
m = await send("__**Downloading.**__")
song = await message.reply_to_message.download()
await m.edit("__**Transcoding.**__")
transcode(song)
await m.edit(f"**Playing** __**{message.reply_to_message.link}.**__")
await asyncio.sleep(duration)
playing = False
return
await send("__**Only Audio Files (Not Document) Are Supported.**__")
async def send(text):
m = await app.send_message(
SUDO_CHAT_ID, text=text, disable_web_page_preview=True
)
return m
print(
"\nBot Starting..."
)
app.run()
|
ing
|
main.rs
|
// DO NOT EDIT !
// This file was generated automatically from 'src/mako/cli/main.rs.mako'
// DO NOT EDIT !
#![allow(unused_variables, unused_imports, dead_code, unused_mut)]
extern crate tokio;
#[macro_use]
extern crate clap;
extern crate yup_oauth2 as oauth2;
use std::env;
use std::io::{self, Write};
use clap::{App, SubCommand, Arg};
use google_spectrum1_explorer::{api, Error};
mod client;
use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg,
input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol,
calltype_from_str, remove_json_null_values, ComplexType, JsonType, JsonTypeInfo};
use std::default::Default;
use std::str::FromStr;
use serde_json as json;
use clap::ArgMatches;
enum DoitError {
IoError(String, io::Error),
ApiError(Error),
}
struct Engine<'n> {
opt: ArgMatches<'n>,
hub: api::Spectrum<hyper::Client<hyper_rustls::HttpsConnector<hyper::client::connect::HttpConnector>, hyper::body::Body>
>,
gp: Vec<&'static str>,
gpm: Vec<(&'static str, &'static str)>,
}
impl<'n> Engine<'n> {
async fn _paws_get_spectrum(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"antenna.height" => Some(("antenna.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"antenna.height-type" => Some(("antenna.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"antenna.height-uncertainty" => Some(("antenna.heightUncertainty", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"device-desc.etsi-en-device-category" => Some(("deviceDesc.etsiEnDeviceCategory", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.etsi-en-device-emissions-class" => Some(("deviceDesc.etsiEnDeviceEmissionsClass", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.etsi-en-device-type" => Some(("deviceDesc.etsiEnDeviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.etsi-en-technology-id" => Some(("deviceDesc.etsiEnTechnologyId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.fcc-id" => Some(("deviceDesc.fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.fcc-tvbd-device-type" => Some(("deviceDesc.fccTvbdDeviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.manufacturer-id" => Some(("deviceDesc.manufacturerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.model-id" => Some(("deviceDesc.modelId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.ruleset-ids" => Some(("deviceDesc.rulesetIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
"device-desc.serial-number" => Some(("deviceDesc.serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"location.confidence" => Some(("location.confidence", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })),
"location.point.center.latitude" => Some(("location.point.center.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"location.point.center.longitude" => Some(("location.point.center.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"location.point.orientation" => Some(("location.point.orientation", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"location.point.semi-major-axis" => Some(("location.point.semiMajorAxis", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"location.point.semi-minor-axis" => Some(("location.point.semiMinorAxis", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"master-device-desc.etsi-en-device-category" => Some(("masterDeviceDesc.etsiEnDeviceCategory", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"master-device-desc.etsi-en-device-emissions-class" => Some(("masterDeviceDesc.etsiEnDeviceEmissionsClass", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"master-device-desc.etsi-en-device-type" => Some(("masterDeviceDesc.etsiEnDeviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"master-device-desc.etsi-en-technology-id" => Some(("masterDeviceDesc.etsiEnTechnologyId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"master-device-desc.fcc-id" => Some(("masterDeviceDesc.fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"master-device-desc.fcc-tvbd-device-type" => Some(("masterDeviceDesc.fccTvbdDeviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"master-device-desc.manufacturer-id" => Some(("masterDeviceDesc.manufacturerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"master-device-desc.model-id" => Some(("masterDeviceDesc.modelId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"master-device-desc.ruleset-ids" => Some(("masterDeviceDesc.rulesetIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
"master-device-desc.serial-number" => Some(("masterDeviceDesc.serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.operator.adr.code" => Some(("owner.operator.adr.code", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.operator.adr.country" => Some(("owner.operator.adr.country", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.operator.adr.locality" => Some(("owner.operator.adr.locality", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.operator.adr.pobox" => Some(("owner.operator.adr.pobox", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.operator.adr.region" => Some(("owner.operator.adr.region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.operator.adr.street" => Some(("owner.operator.adr.street", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.operator.email.text" => Some(("owner.operator.email.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.operator.fn" => Some(("owner.operator.fn", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.operator.org.text" => Some(("owner.operator.org.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.operator.tel.uri" => Some(("owner.operator.tel.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.owner.adr.code" => Some(("owner.owner.adr.code", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.owner.adr.country" => Some(("owner.owner.adr.country", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.owner.adr.locality" => Some(("owner.owner.adr.locality", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.owner.adr.pobox" => Some(("owner.owner.adr.pobox", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.owner.adr.region" => Some(("owner.owner.adr.region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.owner.adr.street" => Some(("owner.owner.adr.street", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.owner.email.text" => Some(("owner.owner.email.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.owner.fn" => Some(("owner.owner.fn", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.owner.org.text" => Some(("owner.owner.org.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.owner.tel.uri" => Some(("owner.owner.tel.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"request-type" => Some(("requestType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"version" => Some(("version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["adr", "antenna", "center", "code", "confidence", "country", "device-desc", "email", "etsi-en-device-category", "etsi-en-device-emissions-class", "etsi-en-device-type", "etsi-en-technology-id", "fcc-id", "fcc-tvbd-device-type", "fn", "height", "height-type", "height-uncertainty", "latitude", "locality", "location", "longitude", "manufacturer-id", "master-device-desc", "model-id", "operator", "org", "orientation", "owner", "pobox", "point", "region", "request-type", "ruleset-ids", "semi-major-axis", "semi-minor-axis", "serial-number", "street", "tel", "text", "type", "uri", "version"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::PawsGetSpectrumRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.paws().get_spectrum(request);
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _paws_get_spectrum_batch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"antenna.height" => Some(("antenna.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"antenna.height-type" => Some(("antenna.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"antenna.height-uncertainty" => Some(("antenna.heightUncertainty", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"device-desc.etsi-en-device-category" => Some(("deviceDesc.etsiEnDeviceCategory", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.etsi-en-device-emissions-class" => Some(("deviceDesc.etsiEnDeviceEmissionsClass", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.etsi-en-device-type" => Some(("deviceDesc.etsiEnDeviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.etsi-en-technology-id" => Some(("deviceDesc.etsiEnTechnologyId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.fcc-id" => Some(("deviceDesc.fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.fcc-tvbd-device-type" => Some(("deviceDesc.fccTvbdDeviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.manufacturer-id" => Some(("deviceDesc.manufacturerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.model-id" => Some(("deviceDesc.modelId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.ruleset-ids" => Some(("deviceDesc.rulesetIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
"device-desc.serial-number" => Some(("deviceDesc.serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"master-device-desc.etsi-en-device-category" => Some(("masterDeviceDesc.etsiEnDeviceCategory", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"master-device-desc.etsi-en-device-emissions-class" => Some(("masterDeviceDesc.etsiEnDeviceEmissionsClass", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"master-device-desc.etsi-en-device-type" => Some(("masterDeviceDesc.etsiEnDeviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"master-device-desc.etsi-en-technology-id" => Some(("masterDeviceDesc.etsiEnTechnologyId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"master-device-desc.fcc-id" => Some(("masterDeviceDesc.fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"master-device-desc.fcc-tvbd-device-type" => Some(("masterDeviceDesc.fccTvbdDeviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"master-device-desc.manufacturer-id" => Some(("masterDeviceDesc.manufacturerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"master-device-desc.model-id" => Some(("masterDeviceDesc.modelId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"master-device-desc.ruleset-ids" => Some(("masterDeviceDesc.rulesetIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
"master-device-desc.serial-number" => Some(("masterDeviceDesc.serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.operator.adr.code" => Some(("owner.operator.adr.code", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.operator.adr.country" => Some(("owner.operator.adr.country", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.operator.adr.locality" => Some(("owner.operator.adr.locality", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.operator.adr.pobox" => Some(("owner.operator.adr.pobox", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.operator.adr.region" => Some(("owner.operator.adr.region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.operator.adr.street" => Some(("owner.operator.adr.street", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.operator.email.text" => Some(("owner.operator.email.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.operator.fn" => Some(("owner.operator.fn", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.operator.org.text" => Some(("owner.operator.org.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.operator.tel.uri" => Some(("owner.operator.tel.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.owner.adr.code" => Some(("owner.owner.adr.code", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.owner.adr.country" => Some(("owner.owner.adr.country", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.owner.adr.locality" => Some(("owner.owner.adr.locality", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.owner.adr.pobox" => Some(("owner.owner.adr.pobox", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.owner.adr.region" => Some(("owner.owner.adr.region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.owner.adr.street" => Some(("owner.owner.adr.street", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.owner.email.text" => Some(("owner.owner.email.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.owner.fn" => Some(("owner.owner.fn", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.owner.org.text" => Some(("owner.owner.org.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"owner.owner.tel.uri" => Some(("owner.owner.tel.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"request-type" => Some(("requestType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"version" => Some(("version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["adr", "antenna", "code", "country", "device-desc", "email", "etsi-en-device-category", "etsi-en-device-emissions-class", "etsi-en-device-type", "etsi-en-technology-id", "fcc-id", "fcc-tvbd-device-type", "fn", "height", "height-type", "height-uncertainty", "locality", "manufacturer-id", "master-device-desc", "model-id", "operator", "org", "owner", "pobox", "region", "request-type", "ruleset-ids", "serial-number", "street", "tel", "text", "type", "uri", "version"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::PawsGetSpectrumBatchRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.paws().get_spectrum_batch(request);
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _paws_init(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"device-desc.etsi-en-device-category" => Some(("deviceDesc.etsiEnDeviceCategory", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.etsi-en-device-emissions-class" => Some(("deviceDesc.etsiEnDeviceEmissionsClass", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.etsi-en-device-type" => Some(("deviceDesc.etsiEnDeviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.etsi-en-technology-id" => Some(("deviceDesc.etsiEnTechnologyId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.fcc-id" => Some(("deviceDesc.fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.fcc-tvbd-device-type" => Some(("deviceDesc.fccTvbdDeviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.manufacturer-id" => Some(("deviceDesc.manufacturerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.model-id" => Some(("deviceDesc.modelId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.ruleset-ids" => Some(("deviceDesc.rulesetIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
"device-desc.serial-number" => Some(("deviceDesc.serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"location.confidence" => Some(("location.confidence", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })),
"location.point.center.latitude" => Some(("location.point.center.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"location.point.center.longitude" => Some(("location.point.center.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"location.point.orientation" => Some(("location.point.orientation", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"location.point.semi-major-axis" => Some(("location.point.semiMajorAxis", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"location.point.semi-minor-axis" => Some(("location.point.semiMinorAxis", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"version" => Some(("version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["center", "confidence", "device-desc", "etsi-en-device-category", "etsi-en-device-emissions-class", "etsi-en-device-type", "etsi-en-technology-id", "fcc-id", "fcc-tvbd-device-type", "latitude", "location", "longitude", "manufacturer-id", "model-id", "orientation", "point", "ruleset-ids", "semi-major-axis", "semi-minor-axis", "serial-number", "type", "version"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::PawsInitRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.paws().init(request);
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _paws_notify_spectrum_use(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"device-desc.etsi-en-device-category" => Some(("deviceDesc.etsiEnDeviceCategory", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.etsi-en-device-emissions-class" => Some(("deviceDesc.etsiEnDeviceEmissionsClass", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.etsi-en-device-type" => Some(("deviceDesc.etsiEnDeviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.etsi-en-technology-id" => Some(("deviceDesc.etsiEnTechnologyId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.fcc-id" => Some(("deviceDesc.fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.fcc-tvbd-device-type" => Some(("deviceDesc.fccTvbdDeviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.manufacturer-id" => Some(("deviceDesc.manufacturerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.model-id" => Some(("deviceDesc.modelId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.ruleset-ids" => Some(("deviceDesc.rulesetIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
"device-desc.serial-number" => Some(("deviceDesc.serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"location.confidence" => Some(("location.confidence", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })),
"location.point.center.latitude" => Some(("location.point.center.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"location.point.center.longitude" => Some(("location.point.center.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"location.point.orientation" => Some(("location.point.orientation", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"location.point.semi-major-axis" => Some(("location.point.semiMajorAxis", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"location.point.semi-minor-axis" => Some(("location.point.semiMinorAxis", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"version" => Some(("version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["center", "confidence", "device-desc", "etsi-en-device-category", "etsi-en-device-emissions-class", "etsi-en-device-type", "etsi-en-technology-id", "fcc-id", "fcc-tvbd-device-type", "latitude", "location", "longitude", "manufacturer-id", "model-id", "orientation", "point", "ruleset-ids", "semi-major-axis", "semi-minor-axis", "serial-number", "type", "version"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::PawsNotifySpectrumUseRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.paws().notify_spectrum_use(request);
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _paws_register(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError>
|
async fn _paws_verify_device(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"version" => Some(("version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["type", "version"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::PawsVerifyDeviceRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.paws().verify_device(request);
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _doit(&self, dry_run: bool) -> Result<Result<(), DoitError>, Option<InvalidOptionsError>> {
let mut err = InvalidOptionsError::new();
let mut call_result: Result<(), DoitError> = Ok(());
let mut err_opt: Option<InvalidOptionsError> = None;
match self.opt.subcommand() {
("paws", Some(opt)) => {
match opt.subcommand() {
("get-spectrum", Some(opt)) => {
call_result = self._paws_get_spectrum(opt, dry_run, &mut err).await;
},
("get-spectrum-batch", Some(opt)) => {
call_result = self._paws_get_spectrum_batch(opt, dry_run, &mut err).await;
},
("init", Some(opt)) => {
call_result = self._paws_init(opt, dry_run, &mut err).await;
},
("notify-spectrum-use", Some(opt)) => {
call_result = self._paws_notify_spectrum_use(opt, dry_run, &mut err).await;
},
("register", Some(opt)) => {
call_result = self._paws_register(opt, dry_run, &mut err).await;
},
("verify-device", Some(opt)) => {
call_result = self._paws_verify_device(opt, dry_run, &mut err).await;
},
_ => {
err.issues.push(CLIError::MissingMethodError("paws".to_string()));
writeln!(io::stderr(), "{}\n", opt.usage()).ok();
}
}
},
_ => {
err.issues.push(CLIError::MissingCommandError);
writeln!(io::stderr(), "{}\n", self.opt.usage()).ok();
}
}
if dry_run {
if err.issues.len() > 0 {
err_opt = Some(err);
}
Err(err_opt)
} else {
Ok(call_result)
}
}
// Please note that this call will fail if any part of the opt can't be handled
async fn new(opt: ArgMatches<'n>) -> Result<Engine<'n>, InvalidOptionsError> {
let (config_dir, secret) = {
let config_dir = match client::assure_config_dir_exists(opt.value_of("folder").unwrap_or("~/.google-service-cli")) {
Err(e) => return Err(InvalidOptionsError::single(e, 3)),
Ok(p) => p,
};
match client::application_secret_from_directory(&config_dir, "spectrum1-explorer-secret.json",
"{\"installed\":{\"auth_uri\":\"https://accounts.google.com/o/oauth2/auth\",\"client_secret\":\"hCsslbCUyfehWMmbkG8vTYxG\",\"token_uri\":\"https://accounts.google.com/o/oauth2/token\",\"client_email\":\"\",\"redirect_uris\":[\"urn:ietf:wg:oauth:2.0:oob\",\"oob\"],\"client_x509_cert_url\":\"\",\"client_id\":\"620010449518-9ngf7o4dhs0dka470npqvor6dc5lqb9b.apps.googleusercontent.com\",\"auth_provider_x509_cert_url\":\"https://www.googleapis.com/oauth2/v1/certs\"}}") {
Ok(secret) => (config_dir, secret),
Err(e) => return Err(InvalidOptionsError::single(e, 4))
}
};
let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
secret,
yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
).persist_tokens_to_disk(format!("{}/spectrum1-explorer", config_dir)).build().await.unwrap();
let client = hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots());
let engine = Engine {
opt: opt,
hub: api::Spectrum::new(client, auth),
gp: vec!["alt", "fields", "key", "oauth-token", "pretty-print", "quota-user", "user-ip"],
gpm: vec![
("oauth-token", "oauth_token"),
("pretty-print", "prettyPrint"),
("quota-user", "quotaUser"),
("user-ip", "userIp"),
]
};
match engine._doit(true).await {
Err(Some(err)) => Err(err),
Err(None) => Ok(engine),
Ok(_) => unreachable!(),
}
}
async fn doit(&self) -> Result<(), DoitError> {
match self._doit(false).await {
Ok(res) => res,
Err(_) => unreachable!(),
}
}
}
#[tokio::main]
async fn main() {
let mut exit_status = 0i32;
let arg_data = [
("paws", "methods: 'get-spectrum', 'get-spectrum-batch', 'init', 'notify-spectrum-use', 'register' and 'verify-device'", vec![
("get-spectrum",
Some(r##"Requests information about the available spectrum for a device at a location. Requests from a fixed-mode device must include owner information so the device can be registered with the database."##),
"Details at http://byron.github.io/google-apis-rs/google_spectrum1_explorer_cli/paws_get-spectrum",
vec![
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("get-spectrum-batch",
Some(r##"The Google Spectrum Database does not support batch requests, so this method always yields an UNIMPLEMENTED error."##),
"Details at http://byron.github.io/google-apis-rs/google_spectrum1_explorer_cli/paws_get-spectrum-batch",
vec![
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("init",
Some(r##"Initializes the connection between a white space device and the database."##),
"Details at http://byron.github.io/google-apis-rs/google_spectrum1_explorer_cli/paws_init",
vec![
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("notify-spectrum-use",
Some(r##"Notifies the database that the device has selected certain frequency ranges for transmission. Only to be invoked when required by the regulator. The Google Spectrum Database does not operate in domains that require notification, so this always yields an UNIMPLEMENTED error."##),
"Details at http://byron.github.io/google-apis-rs/google_spectrum1_explorer_cli/paws_notify-spectrum-use",
vec![
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("register",
Some(r##"The Google Spectrum Database implements registration in the getSpectrum method. As such this always returns an UNIMPLEMENTED error."##),
"Details at http://byron.github.io/google-apis-rs/google_spectrum1_explorer_cli/paws_register",
vec![
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("verify-device",
Some(r##"Validates a device for white space use in accordance with regulatory rules. The Google Spectrum Database does not support master/slave configurations, so this always yields an UNIMPLEMENTED error."##),
"Details at http://byron.github.io/google-apis-rs/google_spectrum1_explorer_cli/paws_verify-device",
vec![
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
]),
];
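    // Example invocation shape implied by the argument table above (values are
    // hypothetical and only illustrate the `-r key=value` and `-o <file>` flag forms):
    //   spectrum1-explorer paws init -r type=INIT_REQ -r version=1.0 -o response.json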
let mut app = App::new("spectrum1-explorer")
.author("Sebastian Thiel <[email protected]>")
.version("2.0.0+20170306")
.about("API for spectrum-management functions.")
.after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_spectrum1_explorer_cli")
.arg(Arg::with_name("folder")
.long("config-dir")
.help("A directory into which we will store our persistent data. Defaults to a user-writable directory that we will create during the first invocation.[default: ~/.google-service-cli")
.multiple(false)
.takes_value(true))
.arg(Arg::with_name("debug")
.long("debug")
.help("Debug print all errors")
.multiple(false)
.takes_value(false));
for &(main_command_name, about, ref subcommands) in arg_data.iter() {
let mut mcmd = SubCommand::with_name(main_command_name).about(about);
for &(sub_command_name, ref desc, url_info, ref args) in subcommands {
let mut scmd = SubCommand::with_name(sub_command_name);
if let &Some(desc) = desc {
scmd = scmd.about(desc);
}
scmd = scmd.after_help(url_info);
for &(ref arg_name, ref flag, ref desc, ref required, ref multi) in args {
let arg_name_str =
match (arg_name, flag) {
(&Some(an), _ ) => an,
(_ , &Some(f)) => f,
_ => unreachable!(),
};
let mut arg = Arg::with_name(arg_name_str)
.empty_values(false);
if let &Some(short_flag) = flag {
arg = arg.short(short_flag);
}
if let &Some(desc) = desc {
arg = arg.help(desc);
}
if arg_name.is_some() && flag.is_some() {
arg = arg.takes_value(true);
}
if let &Some(required) = required {
arg = arg.required(required);
}
if let &Some(multi) = multi {
arg = arg.multiple(multi);
}
scmd = scmd.arg(arg);
}
mcmd = mcmd.subcommand(scmd);
}
app = app.subcommand(mcmd);
}
let matches = app.get_matches();
let debug = matches.is_present("debug");
match Engine::new(matches).await {
Err(err) => {
exit_status = err.exit_code;
writeln!(io::stderr(), "{}", err).ok();
},
Ok(engine) => {
if let Err(doit_err) = engine.doit().await {
exit_status = 1;
match doit_err {
DoitError::IoError(path, err) => {
writeln!(io::stderr(), "Failed to open output file '{}': {}", path, err).ok();
},
DoitError::ApiError(err) => {
if debug {
writeln!(io::stderr(), "{:#?}", err).ok();
} else {
writeln!(io::stderr(), "{}", err).ok();
}
}
}
}
}
}
std::process::exit(exit_status);
}
|
{
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"antenna.height" => Some(("antenna.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"antenna.height-type" => Some(("antenna.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"antenna.height-uncertainty" => Some(("antenna.heightUncertainty", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"device-desc.etsi-en-device-category" => Some(("deviceDesc.etsiEnDeviceCategory", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.etsi-en-device-emissions-class" => Some(("deviceDesc.etsiEnDeviceEmissionsClass", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.etsi-en-device-type" => Some(("deviceDesc.etsiEnDeviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.etsi-en-technology-id" => Some(("deviceDesc.etsiEnTechnologyId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.fcc-id" => Some(("deviceDesc.fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.fcc-tvbd-device-type" => Some(("deviceDesc.fccTvbdDeviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.manufacturer-id" => Some(("deviceDesc.manufacturerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.model-id" => Some(("deviceDesc.modelId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-desc.ruleset-ids" => Some(("deviceDesc.rulesetIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
"device-desc.serial-number" => Some(("deviceDesc.serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-owner.operator.adr.code" => Some(("deviceOwner.operator.adr.code", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-owner.operator.adr.country" => Some(("deviceOwner.operator.adr.country", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-owner.operator.adr.locality" => Some(("deviceOwner.operator.adr.locality", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-owner.operator.adr.pobox" => Some(("deviceOwner.operator.adr.pobox", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-owner.operator.adr.region" => Some(("deviceOwner.operator.adr.region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-owner.operator.adr.street" => Some(("deviceOwner.operator.adr.street", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-owner.operator.email.text" => Some(("deviceOwner.operator.email.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-owner.operator.fn" => Some(("deviceOwner.operator.fn", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-owner.operator.org.text" => Some(("deviceOwner.operator.org.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-owner.operator.tel.uri" => Some(("deviceOwner.operator.tel.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-owner.owner.adr.code" => Some(("deviceOwner.owner.adr.code", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-owner.owner.adr.country" => Some(("deviceOwner.owner.adr.country", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-owner.owner.adr.locality" => Some(("deviceOwner.owner.adr.locality", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-owner.owner.adr.pobox" => Some(("deviceOwner.owner.adr.pobox", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-owner.owner.adr.region" => Some(("deviceOwner.owner.adr.region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-owner.owner.adr.street" => Some(("deviceOwner.owner.adr.street", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-owner.owner.email.text" => Some(("deviceOwner.owner.email.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-owner.owner.fn" => Some(("deviceOwner.owner.fn", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-owner.owner.org.text" => Some(("deviceOwner.owner.org.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"device-owner.owner.tel.uri" => Some(("deviceOwner.owner.tel.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"location.confidence" => Some(("location.confidence", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })),
"location.point.center.latitude" => Some(("location.point.center.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"location.point.center.longitude" => Some(("location.point.center.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"location.point.orientation" => Some(("location.point.orientation", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"location.point.semi-major-axis" => Some(("location.point.semiMajorAxis", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"location.point.semi-minor-axis" => Some(("location.point.semiMinorAxis", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"version" => Some(("version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["adr", "antenna", "center", "code", "confidence", "country", "device-desc", "device-owner", "email", "etsi-en-device-category", "etsi-en-device-emissions-class", "etsi-en-device-type", "etsi-en-technology-id", "fcc-id", "fcc-tvbd-device-type", "fn", "height", "height-type", "height-uncertainty", "latitude", "locality", "location", "longitude", "manufacturer-id", "model-id", "operator", "org", "orientation", "owner", "pobox", "point", "region", "ruleset-ids", "semi-major-axis", "semi-minor-axis", "serial-number", "street", "tel", "text", "type", "uri", "version"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::PawsRegisterRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.paws().register(request);
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
|
test_issue_metrics.py
|
#SPDX-License-Identifier: MIT
import pytest
import pandas as pd
def test_issues_new(metrics):
#repo_id
    assert metrics.issues_new(1, 1, period='year').iloc[0]['issues'] > 0
#repo_group_id
assert metrics.issues_new(10, period='year').iloc[1]['issues'] > 0
#begin_date & end_date
assert metrics.issues_new(10, 25430, period='week', begin_date='2017',
end_date='2017-10').iloc[1]['issues'] > 0
assert metrics.issues_new(10, period='month', begin_date='2017-05',
end_date='2018').iloc[2]['issues'] > 0
def test_issues_active(metrics):
# repo
assert metrics.issues_active(1, 1, period='year').iloc[0]['issues'] > 0
# repo_group
assert metrics.issues_active(10, period='year').iloc[0]['issues'] > 0
# begin_date & end_date
assert metrics.issues_active(10, 25430, period='month', begin_date='2020-02',
end_date='2020-03').iloc[0]['issues'] > 0
    assert metrics.issues_active(10, period='week', begin_date='2020-01',
                                 end_date='2020-03').iloc[0]['issues'] > 0
def test_issues_closed(metrics):
# repo
assert metrics.issues_closed(10, 25430, period='year').iloc[0]['issues'] > 0
#repo_group
assert metrics.issues_closed(10, period='year').iloc[0]['issues'] > 0
# begin_date & end_date
assert metrics.issues_closed(10, 25430, period='week', begin_date='2019',
end_date='2020-02').iloc[0]['issues'] > 0
assert metrics.issues_closed(10, period='month', begin_date='2018-05',
end_date='2019-08-15').iloc[0]['issues'] > 0
def test_issue_duration(metrics):
# repo
assert metrics.issue_duration(10, 25430).iloc[0]['duration'] == '20 days 03:08:22.000000000'
# repo_group
assert metrics.issue_duration(10).iloc[0]['duration'] == '20 days 03:08:22.000000000'
def test_issue_participants(metrics):
# repo
assert metrics.issue_participants(10, 25430).iloc[0]['participants'] > 0
# repo_group
assert metrics.issue_participants(10).iloc[0]['participants'] > 0
def test_issue_throughput(metrics):
# repo
assert metrics.issue_throughput(10, 25430).iloc[0]['throughput'] >= 0
# repo_group
assert metrics.issue_throughput(10).iloc[0]['throughput'] >= 0
def test_issue_backlog(metrics):
#repo_id
assert metrics.issue_backlog(10, 25430).iloc[0]['issue_backlog'] > 0
#repo_group_id
assert metrics.issue_backlog(10).iloc[0]['issue_backlog'] > 0
def test_issues_first_time_closed(metrics):
# repo id
assert metrics.issues_first_time_closed(10, repo_id=25430, period='year').isin(
[pd.Timestamp('2019', tz='UTC')]).any().any()
# repo_group_id
assert metrics.issues_first_time_closed(10, period='year').isin(
[pd.Timestamp('2020', tz='UTC')]).any().any()
# begin_date and end_date
assert metrics.issues_first_time_closed(10, period='year', begin_date='2019-1-1 00:00:00',
end_date='2019-12-31 23:59:59').isin([pd.Timestamp('2019-01-01 00:00:00', tz='UTC')]).any().any()
assert metrics.issues_first_time_closed(10, repo_id=25430, period='year', begin_date='2019-1-1 00:00:00',
end_date='2019-12-31 23:59:59').isin([pd.Timestamp('2019-01-01 00:00:00', tz='UTC')]).any().any()
def test_open_issues_count(metrics):
# repo
assert metrics.open_issues_count(10, 25430).iloc[0]['open_count'] > 0
# repo_group
assert metrics.open_issues_count(10).iloc[0]['open_count'] > 0
def test_closed_issues_count(metrics):
# repo
assert metrics.closed_issues_count(10, 25430).iloc[0]['closed_count'] > 0
# repo_group
assert metrics.closed_issues_count(10).iloc[0]['closed_count'] > 0
def test_issues_open_age(metrics):
#repo group
assert metrics.issues_open_age(10).iloc[0]['open_date'] > 0
# repo
assert metrics.issues_open_age(10, 25430).iloc[0]['open_date'] > 0
def test_issues_closed_resolution_duration(metrics):
# repo group
|
def test_average_issue_resolution_time(metrics):
#repo
assert metrics.average_issue_resolution_time(10, 25430).isin(
['augur', '61 days 12:20:43.791667']).any().any()
# repo_group
assert metrics.average_issue_resolution_time(10).isin(
['grimoirelab', ' 67 days 22:41:55.260417']).any().any()
def test_issues_maintainer_response_duration(metrics):
assert metrics.issues_maintainer_response_duration(10, 25430).iloc[0].average_days_comment > 0
assert metrics.issues_maintainer_response_duration(10).iloc[0].average_days_comment > 0
assert metrics.issues_maintainer_response_duration(10, 25430).iloc[0].average_days_comment > 0
def test_issue_comments_mean(metrics):
assert metrics.issue_comments_mean(10).any().any()
assert metrics.issue_comments_mean(10, 25430).any().any()
assert metrics.issue_comments_mean(10, group_by='year').any().any()
assert metrics.issue_comments_mean(10, 25430, group_by='year').any().any()
def test_issue_comments_mean_std(metrics):
assert metrics.issue_comments_mean_std(10).any().any()
assert metrics.issue_comments_mean_std(10, 25430).any().any()
assert metrics.issue_comments_mean_std(10, group_by='year').any().any()
assert metrics.issue_comments_mean_std(10, 25430, group_by='year').any().any()
|
assert metrics.issues_closed_resolution_duration(10).iloc[0]['diffdate'] >= 0
# repo
assert metrics.issues_closed_resolution_duration(10, 25430).iloc[0]['diffdate'] >= 0
|
acm_ui.py
|
import os
import logging
import time
from selenium.webdriver.common.by import By
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.keys import Keys
from ocs_ci.ocs import constants
from ocs_ci.ocs.exceptions import ACMClusterDeployException
from ocs_ci.ocs.ui.base_ui import BaseUI
from ocs_ci.ocs.ui.helpers_ui import format_locator
from ocs_ci.ocs.ui.views import locators
from ocs_ci.utility.utils import (
get_ocp_version,
expose_ocp_version,
run_cmd,
)
from ocs_ci.ocs.constants import (
PLATFORM_XPATH_MAP,
ACM_PLATOFRM_VSPHERE_CRED_PREFIX,
VSPHERE_CA_FILE_PATH,
DATA_DIR,
ACM_OCP_RELEASE_IMG_URL_PREFIX,
ACM_VSPHERE_NETWORK,
ACM_CLUSTER_DEPLOY_TIMEOUT,
ACM_CLUSTER_DEPLOYMENT_LABEL_KEY,
ACM_CLUSTER_DEPLOYMENT_SECRET_TYPE_LABEL_KEY,
)
from ocs_ci.framework import config
from ocs_ci.utility.retry import retry
log = logging.getLogger(__name__)
class AcmPageNavigator(BaseUI):
"""
ACM Page Navigator Class
"""
def __init__(self, driver):
super().__init__(driver)
self.ocp_version = get_ocp_version()
self.acm_page_nav = locators[self.ocp_version]["acm_page"]
def navigate_welcome_page(self):
"""
Navigate to ACM Welcome Page
"""
log.info("Navigate into Home Page")
self.choose_expanded_mode(mode=True, locator=self.acm_page_nav["Home"])
self.do_click(locator=self.acm_page_nav["Welcome_page"])
def navigate_overview_page(self):
"""
Navigate to ACM Overview Page
"""
log.info("Navigate into Overview Page")
self.choose_expanded_mode(mode=True, locator=self.acm_page_nav["Home"])
self.do_click(locator=self.acm_page_nav["Overview_page"])
def navigate_clusters_page(self):
"""
Navigate to ACM Clusters Page
"""
log.info("Navigate into Clusters Page")
self.choose_expanded_mode(
mode=True, locator=self.acm_page_nav["Infrastructure"]
)
self.do_click(locator=self.acm_page_nav["Clusters_page"])
def navigate_bare_metal_assets_page(self):
"""
Navigate to ACM Bare Metal Assets Page
"""
log.info("Navigate into Bare Metal Assets Page")
self.choose_expanded_mode(
mode=True, locator=self.acm_page_nav["Infrastructure"]
)
self.do_click(locator=self.acm_page_nav["Bare_metal_assets_page"])
def navigate_automation_page(self):
"""
Navigate to ACM Automation Page
"""
log.info("Navigate into Automation Page")
self.choose_expanded_mode(
mode=True, locator=self.acm_page_nav["Infrastructure"]
)
self.do_click(locator=self.acm_page_nav["Automation_page"])
def navigate_infrastructure_env_page(self):
"""
Navigate to ACM Infrastructure Environments Page
"""
log.info("Navigate into Infrastructure Environments Page")
self.choose_expanded_mode(
mode=True, locator=self.acm_page_nav["Infrastructure"]
)
self.do_click(locator=self.acm_page_nav["Infrastructure_environments_page"])
def navigate_applications_page(self):
"""
Navigate to ACM Applications Page
"""
log.info("Navigate into Applications Page")
self.do_click(locator=self.acm_page_nav["Applications"])
def navigate_governance_page(self):
"""
Navigate to ACM Governance Page
"""
log.info("Navigate into Governance Page")
self.do_click(locator=self.acm_page_nav["Governance"])
def navigate_credentials_page(self):
"""
Navigate to ACM Credentials Page
"""
log.info("Navigate into Governance Page")
self.do_click(locator=self.acm_page_nav["Credentials"])
class ACMOCPClusterDeployment(AcmPageNavigator):
"""
Everything related to cluster creation through ACM goes here
"""
def __init__(self, driver, platform, cluster_conf):
super().__init__(driver)
self.platform = platform
self.cluster_conf = cluster_conf
self.cluster_name = self.cluster_conf.ENV_DATA["cluster_name"]
self.cluster_path = self.cluster_conf.ENV_DATA["cluster_path"]
self.deploy_sync_mode = config.MULTICLUSTER.get("deploy_sync_mode", "async")
self.deployment_status = None
self.cluster_deploy_timeout = self.cluster_conf.ENV_DATA.get(
"cluster_deploy_timeout", ACM_CLUSTER_DEPLOY_TIMEOUT
)
self.deployment_failed_reason = None
self.deployment_start_time = 0
def create_cluster_prereq(self):
raise NotImplementedError("Child class has to implement this method")
def navigate_create_clusters_page(self):
        # Navigate to the Clusters page, which has the 'Create Cluster' and
        # 'Import Cluster' buttons. Here we click on 'Create Cluster' and land
        # on the create-cluster page.
while True:
self.navigate_clusters_page()
log.info("Clicking on 'CreateCluster'")
# Because of weird selenium behaviour we are checking
# for CreateCluster button in 3 different ways
# 1. CreateCluster button
# 2. CreateCluster button with index xpath
# 3. Checking url, which should end with 'create-cluster'
if not self.check_element_presence(
(By.XPATH, self.acm_page_nav["cc_create_cluster"][0]), timeout=60
):
log.error("Create cluster button not found")
raise ACMClusterDeployException("Can't continue with deployment")
log.info("check 1:Found create cluster button")
if not self.check_element_presence(
(By.XPATH, self.acm_page_nav["cc_create_cluster_index_xpath"][0]),
timeout=300,
):
log.error("Create cluster button not found")
raise ACMClusterDeployException("Can't continue with deployment")
log.info("check 2:Found create cluster by index path")
self.do_click(locator=self.acm_page_nav["cc_create_cluster"], timeout=100)
time.sleep(20)
if self.driver.current_url.endswith("create-cluster"):
break
def click_next_button(self):
self.do_click(self.acm_page_nav["cc_next_page_button"])
def fill_multiple_textbox(self, key_val):
"""
In a page if we want to fill multiple text boxes we can use
this function which iteratively fills in values from the dictionary parameter
key_val (dict): keys corresponds to the xpath of text box, value corresponds
to the value to be filled in
"""
for xpath, value in key_val.items():
self.do_send_keys(locator=xpath, text=value)
def click_platform_and_credentials(self):
self.navigate_create_clusters_page()
self.do_click(
locator=self.acm_page_nav[PLATFORM_XPATH_MAP[self.platform]], timeout=100
)
self.do_click(
locator=self.acm_page_nav["cc_infrastructure_provider_creds_dropdown"]
)
credential = format_locator(
self.acm_page_nav["cc_infrastructure_provider_creds_select_creds"],
self.platform_credential_name,
)
self.do_click(locator=credential)
@retry(ACMClusterDeployException, tries=3, delay=10, backoff=1)
def goto_cluster_details_page(self):
self.navigate_clusters_page()
locator = format_locator(self.acm_page_nav["cc_table_entry"], self.cluster_name)
self.do_click(locator=locator)
self.do_click(locator=self.acm_page_nav["cc_cluster_details_page"], timeout=100)
self.choose_expanded_mode(True, self.acm_page_nav["cc_details_toggle_icon"])
def get_deployment_status(self):
self.goto_cluster_details_page()
if self.acm_cluster_status_failed(timeout=2):
self.deployment_status = "failed"
elif self.acm_cluster_status_ready(timeout=2):
self.deployment_status = "ready"
elif self.acm_cluster_status_creating(timeout=2):
self.deployment_status = "creating"
else:
self.deployment_status = "unknown"
elapsed_time = int(time.time() - self.deployment_start_time)
if elapsed_time > self.cluster_deploy_timeout:
if self.deployment_status == "creating":
self.deployment_status = "failed"
self.deployment_failed_reason = "deploy_timeout"
def wait_for_cluster_create(self):
# Wait for status creating
        status_check_timeout = 300
        while (
            not self.acm_cluster_status_ready(status_check_timeout)
            and self.cluster_deploy_timeout >= 1
        ):
            self.cluster_deploy_timeout -= status_check_timeout
if self.acm_cluster_status_creating():
log.info(f"Cluster {self.cluster_name} is in 'Creating' phase")
else:
self.acm_bailout_if_failed()
if self.acm_cluster_status_ready():
log.info(
f"Cluster create successful, Cluster {self.cluster_name} is in 'Ready' state"
)
def acm_bailout_if_failed(self):
if self.acm_cluster_status_failed():
raise ACMClusterDeployException("Deployment is in 'FAILED' state")
def acm_cluster_status_failed(self, timeout=5):
return self.check_element_presence(
(
self.acm_page_nav["cc_cluster_status_page_status_failed"][1],
self.acm_page_nav["cc_cluster_status_page_status_failed"][0],
),
timeout=timeout,
)
def acm_cluster_status_ready(self, timeout=120):
return self.check_element_presence(
(
self.acm_page_nav["cc_cluster_status_page_status_ready"][1],
self.acm_page_nav["cc_cluster_status_page_status_ready"][0],
),
timeout=timeout,
)
def acm_cluster_status_creating(self, timeout=120):
return self.check_element_presence(
(
self.acm_page_nav["cc_cluster_status_page_status_creating"][1],
self.acm_page_nav["cc_cluster_status_page_status_creating"][0],
),
timeout=timeout,
)
def download_cluster_conf_files(self):
"""
Download install-config and kubeconfig to cluster dir
"""
if not os.path.exists(os.path.expanduser(f"{self.cluster_path}")):
os.mkdir(os.path.expanduser(f"{self.cluster_path}"))
# create auth dir inside cluster dir
auth_dir = os.path.join(os.path.expanduser(f"{self.cluster_path}"), "auth")
if not os.path.exists(auth_dir):
os.mkdir(auth_dir)
self.download_kubeconfig(auth_dir)
def download_kubeconfig(self, authdir):
get_kubeconf_secret_cmd = (
f"$(oc get secret -o name -n {self.cluster_name} "
f"-l {ACM_CLUSTER_DEPLOYMENT_LABEL_KEY}={self.cluster_name} "
f"-l {ACM_CLUSTER_DEPLOYMENT_SECRET_TYPE_LABEL_KEY}=kubeconfig)"
)
extract_cmd = (
f"oc extract -n {self.cluster_name} "
f"{get_kubeconf_secret_cmd} "
f"--to={authdir} --confirm"
)
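        # The assembled command roughly expands to (angle brackets are placeholders,
        # not literal values):
        #   oc extract -n <cluster_name> \
        #       $(oc get secret -o name -n <cluster_name> \
        #          -l <ACM_CLUSTER_DEPLOYMENT_LABEL_KEY>=<cluster_name> \
        #          -l <ACM_CLUSTER_DEPLOYMENT_SECRET_TYPE_LABEL_KEY>=kubeconfig) \
        #       --to=<authdir> --confirm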
run_cmd(extract_cmd)
if not os.path.exists(os.path.join(authdir, "kubeconfig")):
raise ACMClusterDeployException("Could not find the kubeconfig")
def create_cluster(self, cluster_config=None):
|
class ACMOCPPlatformVsphereIPI(ACMOCPClusterDeployment):
"""
This class handles all behind the scene activities
for cluster creation through ACM for vsphere platform
"""
def __init__(self, driver, cluster_conf=None):
super().__init__(driver=driver, platform="vsphere", cluster_conf=cluster_conf)
self.platform_credential_name = cluster_conf.ENV_DATA.get(
"platform_credential_name",
f"{ACM_PLATOFRM_VSPHERE_CRED_PREFIX}{self.cluster_name}",
)
# API VIP & Ingress IP
self.ips = None
self.vsphere_network = None
def create_cluster_prereq(self, timeout=600):
"""
Perform all prereqs before vsphere cluster creation from ACM
Args:
timeout (int): Timeout for any UI operations
"""
        # Create vsphere credentials
# Click on 'Add credential' in 'Infrastructure provider' page
self.navigate_create_clusters_page()
self.refresh_page()
hard_timeout = config.ENV_DATA.get("acm_ui_hard_deadline", 1200)
remaining = hard_timeout
while True:
ret = self.check_element_presence(
(By.XPATH, self.acm_page_nav[PLATFORM_XPATH_MAP[self.platform]][0]),
timeout=300,
)
if ret:
log.info("Found platform icon")
break
else:
if remaining < 0:
raise TimeoutException("Timedout while waiting for platform icon")
else:
remaining -= timeout
self.navigate_create_clusters_page()
self.refresh_page()
self.do_click(
locator=self.acm_page_nav[PLATFORM_XPATH_MAP[self.platform]], timeout=100
)
# "Basic vsphere credential info"
# 1. credential name
# 2. Namespace
# 3. Base DNS domain
self.do_click(locator=self.acm_page_nav["cc_provider_credentials"], timeout=100)
parent_tab = self.driver.current_window_handle
tabs = self.driver.window_handles
self.driver.switch_to.window(tabs[1])
self.do_click(locator=self.acm_page_nav["cc_provider_creds_vsphere"])
basic_cred_dict = {
self.acm_page_nav[
"cc_provider_creds_vsphere_cred_name"
]: self.platform_credential_name,
self.acm_page_nav[
"cc_provider_creds_vsphere_base_dns"
]: f"{self.cluster_conf.ENV_DATA['base_domain']}",
}
self.fill_multiple_textbox(basic_cred_dict)
# Credential Namespace is not a text box but a dropdown
self.do_click(self.acm_page_nav["cc_provider_creds_vsphere_cred_namespace"])
self.do_click(self.acm_page_nav["cc_provider_creds_default_namespace"])
# click on 'Next' button at the bottom
self.click_next_button()
# Detailed VMWare credentials section
# 1. vCenter server
# 2. vCenter username
# 3. vCenter password
        # 4. vCenter root CA certificate
# 5. vSphere cluster name
# 6. vSphere datacenter
# 7. vSphere default Datastore
with open(VSPHERE_CA_FILE_PATH, "r") as fp:
vsphere_ca = fp.read()
vsphere_creds_dict = {
self.acm_page_nav[
"cc_provider_creds_vsphere_vcenter_server"
]: f"{self.cluster_conf.ENV_DATA['vsphere_server']}",
self.acm_page_nav[
"cc_provider_creds_vsphere_username"
]: f"{self.cluster_conf.ENV_DATA['vsphere_user']}",
self.acm_page_nav[
"cc_provider_creds_vsphere_password"
]: f"{self.cluster_conf.ENV_DATA['vsphere_password']}",
self.acm_page_nav["cc_provider_creds_vsphere_rootca"]: f"{vsphere_ca}",
self.acm_page_nav[
"cc_provider_creds_vsphere_clustername"
]: f"{self.cluster_conf.ENV_DATA['vsphere_cluster']}",
self.acm_page_nav[
"cc_provider_creds_vsphere_dc"
]: f"{self.cluster_conf.ENV_DATA['vsphere_datacenter']}",
self.acm_page_nav[
"cc_provider_creds_vsphere_datastore"
]: f"{self.cluster_conf.ENV_DATA['vsphere_datastore']}",
}
self.fill_multiple_textbox(vsphere_creds_dict)
self.click_next_button()
# Pull Secret and SSH
# 1. Pull secret
# 2. SSH Private key
# 3. SSH Public key
with open(os.path.join(DATA_DIR, "pull-secret"), "r") as fp:
pull_secret = fp.read()
ssh_pub_key_path = os.path.expanduser(self.cluster_conf.DEPLOYMENT["ssh_key"])
ssh_priv_key_path = os.path.expanduser(
self.cluster_conf.DEPLOYMENT["ssh_key_private"]
)
with open(ssh_pub_key_path, "r") as fp:
ssh_pub_key = fp.read()
with open(ssh_priv_key_path, "r") as fp:
ssh_priv_key = fp.read()
pull_secret_and_ssh = {
self.acm_page_nav["cc_provider_creds_vsphere_pullsecret"]: f"{pull_secret}",
self.acm_page_nav[
"cc_provider_creds_vsphere_ssh_privkey"
]: f"{ssh_priv_key}",
self.acm_page_nav["cc_provider_creds_vsphere_ssh_pubkey"]: f"{ssh_pub_key}",
}
self.fill_multiple_textbox(pull_secret_and_ssh)
self.click_next_button()
self.do_click(locator=self.acm_page_nav["cc_provider_creds_vsphere_add_button"])
# Go to credentials tab
self.do_click(locator=self.acm_page_nav["Credentials"])
credential_table_entry = format_locator(
self.acm_page_nav["cc_table_entry"], self.platform_credential_name
)
if not self.check_element_presence(
(By.XPATH, credential_table_entry[0]), timeout=20
):
raise ACMClusterDeployException("Could not create credentials for vsphere")
else:
log.info(
f"vsphere credential successfully created {self.platform_credential_name}"
)
        # Get the IPs in the prereq step itself
from ocs_ci.deployment import vmware
# Switch context to cluster which we are about to create
prev_ctx = config.cur_index
config.switch_ctx(self.cluster_conf.MULTICLUSTER["multicluster_index"])
self.ips = vmware.assign_ips(2)
vmware.create_dns_records(self.ips)
config.switch_ctx(prev_ctx)
self.driver.close()
self.driver.switch_to.window(parent_tab)
self.driver.switch_to.default_content()
def create_cluster(self):
"""
        This function navigates through the following pages in the UI
        1. Cluster details
        2. Node pools
3. Networks
4. Proxy
5. Automation
6. Review
Raises:
ACMClusterDeployException: If deployment failed for the cluster
"""
self.navigate_create_clusters_page()
self.click_platform_and_credentials()
self.click_next_button()
self.fill_cluster_details_page()
self.click_next_button()
# For now we don't do anything in 'Node Pools' page
self.click_next_button()
self.fill_network_info()
self.click_next_button()
# Skip proxy for now
self.click_next_button()
# Skip Automation for now
self.click_next_button()
# We are at Review page
# Click on create
self.do_click(locator=self.acm_page_nav["cc_create_button"])
self.deployment_start_time = time.time()
        # We will be redirected to the 'Details' page, which shows cluster deployment progress
if self.deploy_sync_mode == "sync":
try:
self.wait_for_cluster_create()
except ACMClusterDeployException:
log.error(
f"Failed to create OCP cluster {self.cluster_conf.ENV_DATA['cluster_name']}"
)
raise
# Download kubeconfig and install-config file
self.download_cluster_conf_files()
else:
            # Async deployment mode: wait for the 'Creating' status and then
            # return to the caller
if not self.acm_cluster_status_creating(timeout=600):
raise ACMClusterDeployException(
f"Cluster {self.cluster_name} didn't reach 'Creating' phase"
)
self.deployment_status = "Creating"
return
def fill_network_info(self):
"""
We need to fill following network info
1. vSphere network name
2. API VIP
3. Ingress VIP
"""
self.vsphere_network = self.cluster_conf.ENV_DATA.get(
"vm_network", ACM_VSPHERE_NETWORK
)
self.do_click(self.acm_page_nav["cc_vsphere_network_name"])
self.do_send_keys(
self.acm_page_nav["cc_vsphere_network_name"], self.vsphere_network
)
        # Chrome sometimes trims whitespace from the text we send: if the network
        # name is 'VM Network', the text box may end up holding 'VMNetwork'.
        # Check the field value and re-insert the space if needed.
ele = self.driver.find_element(
By.XPATH, self.acm_page_nav["cc_vsphere_network_name"][0]
)
remote_text = ele.get_property("value")
if remote_text != self.vsphere_network:
# Check if we have white space char
# in network name
try:
index = self.vsphere_network.index(constants.SPACE)
left_shift_offset = len(remote_text) - index
self.do_send_keys(
self.acm_page_nav["cc_vsphere_network_name"],
f"{left_shift_offset*Keys.ARROW_LEFT}{constants.SPACE}",
)
except ValueError:
raise ACMClusterDeployException(
"Weird browser behaviour, Not able to provide vsphere network info"
)
vsphere_network = {
self.acm_page_nav["cc_api_vip"]: f"{self.ips[0]}",
self.acm_page_nav["cc_ingress_vip"]: f"{self.ips[1]}",
}
self.fill_multiple_textbox(vsphere_network)
def fill_cluster_details_page(self):
"""
Fill in following details in "Cluster details" page
1. Cluster name
2. Base DNS domain
3. Release image
"""
release_img = self.get_ocp_release_img()
cluster_details = {
self.acm_page_nav[
"cc_cluster_name"
]: f"{self.cluster_conf.ENV_DATA['cluster_name']}",
self.acm_page_nav["cc_openshift_release_image"]: f"{release_img}",
}
self.fill_multiple_textbox(cluster_details)
def get_ocp_release_img(self):
vers = expose_ocp_version(self.cluster_conf.DEPLOYMENT["installer_version"])
return f"{ACM_OCP_RELEASE_IMG_URL_PREFIX}:{vers}"
class ACMOCPDeploymentFactory(object):
def __init__(self):
# All platform specific classes should have map here
self.platform_map = {"vsphereipi": ACMOCPPlatformVsphereIPI}
def get_platform_instance(self, driver, cluster_config):
"""
Args:
driver: selenium UI driver object
cluster_config (dict): Cluster Config object
"""
platform_deployment = (
f"{cluster_config.ENV_DATA['platform']}"
f"{cluster_config.ENV_DATA['deployment_type']}"
)
return self.platform_map[platform_deployment](driver, cluster_config)
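# Usage sketch (assumes a ready selenium driver and a framework.Config whose ENV_DATA
# carries platform="vsphere" and deployment_type="ipi"; values are illustrative):
#   factory = ACMOCPDeploymentFactory()
#   deployer = factory.get_platform_instance(driver, cluster_config)
#   deployer.create_cluster_prereq()
#   deployer.create_cluster()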
|
"""
Create cluster using ACM UI
Args:
cluster_config (Config): framework.Config object of complete configuration required
for deployment
"""
raise NotImplementedError("Child class should implement this function")
|
upload.shapefile.dto.ts
|
import { ApiPropertyOptional } from '@nestjs/swagger';
import { IsOptional, IsString } from 'class-validator';
export class
|
{
@ApiPropertyOptional()
@IsString()
@IsOptional()
name?: string;
}
|
UploadShapefileDto
|
commands.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
import sys
import traceback
from telegram import ParseMode
from . import menus, keyboards
from ..sources import wf, twitch
from ..utils import utils
from ..utils.logging import logger
from ..utils.loadconfig import config
warframe = wf.Warframe()
tw = twitch.Twitch()
def error(update, context):
trace = ''.join(traceback.format_tb(sys.exc_info()[2]))
text = f'The error <code>{context.error}</code> happened. The full traceback:\n\n<code>{trace}</code>'
context.bot.send_message(config['admin_id'], text, parse_mode=ParseMode.HTML)
logger.error(f'Update: {update}')
raise context.error
def start(update, context):
utils.update_user_data(update.message.from_user, context.user_data)
update.message.reply_text('Please choose:', reply_markup=keyboards.main_menu_keyboard())
def alerts(update, context):
utils.update_user_data(update.message.from_user, context.user_data)
update.message.reply_text(text=wf.get_alerts(), parse_mode=ParseMode.MARKDOWN)
def invasions(update, context):
utils.update_user_data(update.message.from_user, context.user_data)
text = wf.get_invasions(update.message.from_user.id, True, False)
update.message.reply_text(text=text, parse_mode=ParseMode.MARKDOWN)
def void_trader(update, context):
    # The command may arrive either as a plain message or via a callback query.
    if update.message is not None:
        from_user = update.message.from_user
        message = update.message
    else:
        from_user = update.callback_query.from_user
        message = update.callback_query.message
    utils.update_user_data(from_user, context.user_data)
    text = wf.get_void_trader_items(from_user.id)
    message.reply_text(text=text, parse_mode=ParseMode.MARKDOWN)
def twitch_get_channel_status(update, context):
|
def admin(update, context):
utils.update_user_data(update.message.from_user, context.user_data)
menus.admin_menu(update, context)
def job_invasions(update):
chat_id = update.job.context
msg = warframe.get_invasions(chat_id, False, True)
if msg and msg != wf.msg_no_invasions and msg != wf.msg_no_invasions_rare_rewards:
update.bot.send_message(chat_id=chat_id, text=msg, parse_mode=ParseMode.MARKDOWN)
def job_alerts(update):
chat_id = update.job.context
msg = warframe.get_alerts(chat_id, True, True)
if msg and msg != wf.msg_no_alerts:
update.bot.send_message(chat_id=chat_id, text=msg, parse_mode=ParseMode.MARKDOWN)
def job_void_trader(update):
chat_id = update.job.context
msg = warframe.get_void_trader_items(chat_id, True)
if msg:
update.bot.send_message(chat_id=chat_id, text=msg, parse_mode=ParseMode.MARKDOWN)
def job_twitch_get_channel_status(update):
chat_id = update.job.context
text, info = tw.get_twitch_status(chat_id, True)
if text and text != twitch.msg_not_active:
update.bot.send_message(chat_id=chat_id, text=text, parse_mode=ParseMode.MARKDOWN)
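# The job_* callbacks above are written for python-telegram-bot's JobQueue; a
# scheduling sketch (interval and chat id are illustrative, assuming the pre-v20
# API style this module already uses):
#   updater.job_queue.run_repeating(job_alerts, interval=300, context=chat_id)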
|
utils.update_user_data(update.message.from_user, context.user_data)
text, info = tw.get_twitch_status(update.message.from_user.id)
update.message.reply_text(text=text, parse_mode=ParseMode.MARKDOWN)
|
jquery.backTop.min.js
|
(function($) {
$.fn.backTop = function(options) {
var backBtn = this;
var settings = $.extend({
'position' : 400,
'speed' : 500,
'color' : 'white',
'theme' : '' // red, green, white, custom
}, options);
//Settings
var position = settings['position'];
var speed = settings['speed'];
var color = settings['color'];
var theme = settings['theme'];
if(theme == 'white'){
backBtn.addClass('white');
} else if(theme == 'red'){
backBtn.addClass('red');
}else if(theme == 'green'){
|
backBtn.addClass('black');
}
backBtn.css({
'right' : 24,
'bottom' : 24,
'position' : 'fixed'
});
$(document).scroll(function(){
var pos = $(window).scrollTop();
// console.log(pos);
if(pos >= position){
backBtn.fadeIn(speed);
} else{
backBtn.fadeOut(speed);
}
});
backBtn.click(function(){
$("html, body").animate({
scrollTop:0
},
{
duration: 1200
});
});
}
}(jQuery));
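// Usage sketch (assumes a back-to-top element such as <a id="backTop">Top</a> exists):
//   $('#backTop').backTop({ 'position': 600, 'speed': 300, 'theme': 'red' });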
|
backBtn.addClass('green');
}else if(theme == 'custom'){
backBtn.addClass('custom');
} else{
|
reconciler.go
|
package liveupdate
import (
"context"
"fmt"
"sync"
"time"
v1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/types"
"sigs.k8s.io/controller-runtime/pkg/builder"
"sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/controller-runtime/pkg/handler"
"sigs.k8s.io/controller-runtime/pkg/source"
apierrors "k8s.io/apimachinery/pkg/api/errors"
ctrl "sigs.k8s.io/controller-runtime"
ctrlclient "sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/controller-runtime/pkg/reconcile"
"github.com/docker/distribution/reference"
"github.com/tilt-dev/tilt/internal/build"
"github.com/tilt-dev/tilt/internal/container"
"github.com/tilt-dev/tilt/internal/containerupdate"
"github.com/tilt-dev/tilt/internal/controllers/apicmp"
"github.com/tilt-dev/tilt/internal/controllers/apis/configmap"
"github.com/tilt-dev/tilt/internal/controllers/apis/liveupdate"
"github.com/tilt-dev/tilt/internal/controllers/indexer"
"github.com/tilt-dev/tilt/internal/k8s"
"github.com/tilt-dev/tilt/internal/ospath"
"github.com/tilt-dev/tilt/internal/sliceutils"
"github.com/tilt-dev/tilt/internal/store"
"github.com/tilt-dev/tilt/internal/store/buildcontrols"
"github.com/tilt-dev/tilt/internal/store/k8sconv"
"github.com/tilt-dev/tilt/internal/store/liveupdates"
"github.com/tilt-dev/tilt/pkg/apis"
"github.com/tilt-dev/tilt/pkg/apis/core/v1alpha1"
"github.com/tilt-dev/tilt/pkg/logger"
"github.com/tilt-dev/tilt/pkg/model"
"github.com/tilt-dev/tilt/pkg/model/logstore"
)
var discoveryGVK = v1alpha1.SchemeGroupVersion.WithKind("KubernetesDiscovery")
var applyGVK = v1alpha1.SchemeGroupVersion.WithKind("KubernetesApply")
var fwGVK = v1alpha1.SchemeGroupVersion.WithKind("FileWatch")
var imageMapGVK = v1alpha1.SchemeGroupVersion.WithKind("ImageMap")
var reasonObjectNotFound = "ObjectNotFound"
// Manages the LiveUpdate API object.
type Reconciler struct {
client ctrlclient.Client
indexer *indexer.Indexer
store store.RStore
ExecUpdater containerupdate.ContainerUpdater
DockerUpdater containerupdate.ContainerUpdater
updateMode liveupdates.UpdateMode
kubeContext k8s.KubeContext
startedTime metav1.MicroTime
monitors map[string]*monitor
// TODO(nick): Remove this mutex once ForceApply is gone.
mu sync.Mutex
}
var _ reconcile.Reconciler = &Reconciler{}
// Dependency-inject a live update reconciler.
func NewReconciler(
st store.RStore,
dcu *containerupdate.DockerUpdater,
ecu *containerupdate.ExecUpdater,
updateMode liveupdates.UpdateMode,
kubeContext k8s.KubeContext,
client ctrlclient.Client,
scheme *runtime.Scheme) *Reconciler {
return &Reconciler{
DockerUpdater: dcu,
ExecUpdater: ecu,
updateMode: updateMode,
kubeContext: kubeContext,
client: client,
indexer: indexer.NewIndexer(scheme, indexLiveUpdate),
store: st,
startedTime: apis.NowMicro(),
monitors: make(map[string]*monitor),
}
}
// Create a reconciler backed by a fake ContainerUpdater and Client.
func NewFakeReconciler(
st store.RStore,
cu containerupdate.ContainerUpdater,
client ctrlclient.Client) *Reconciler {
scheme := v1alpha1.NewScheme()
return &Reconciler{
DockerUpdater: cu,
ExecUpdater: cu,
updateMode: liveupdates.UpdateModeAuto,
kubeContext: k8s.KubeContext("fake-context"),
client: client,
indexer: indexer.NewIndexer(scheme, indexLiveUpdate),
store: st,
startedTime: apis.NowMicro(),
monitors: make(map[string]*monitor),
}
}
func (r *Reconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) {
r.mu.Lock()
defer r.mu.Unlock()
lu := &v1alpha1.LiveUpdate{}
err := r.client.Get(ctx, req.NamespacedName, lu)
r.indexer.OnReconcile(req.NamespacedName, lu)
if err != nil && !apierrors.IsNotFound(err) {
return ctrl.Result{}, fmt.Errorf("liveupdate reconcile: %v", err)
}
if apierrors.IsNotFound(err) || lu.ObjectMeta.DeletionTimestamp != nil {
r.store.Dispatch(liveupdates.NewLiveUpdateDeleteAction(req.Name))
delete(r.monitors, req.Name)
return ctrl.Result{}, nil
}
// The apiserver is the source of truth, and will ensure the engine state is up to date.
r.store.Dispatch(liveupdates.NewLiveUpdateUpsertAction(lu))
ctx = store.MustObjectLogHandler(ctx, r.store, lu)
if lu.Annotations[v1alpha1.AnnotationManagedBy] != "" {
// A LiveUpdate can't be managed by the reconciler until all the objects
// it depends on are managed by the reconciler. The Tiltfile controller
// is responsible for marking objects that we want to manage with ForceApply().
return ctrl.Result{}, nil
}
invalidSelectorFailedState := r.ensureSelectorValid(lu)
if invalidSelectorFailedState != nil {
return r.handleFailure(ctx, lu, invalidSelectorFailedState)
}
monitor := r.ensureMonitorExists(lu.Name, lu)
hasFileChanges, err := r.reconcileSources(ctx, monitor)
if err != nil {
if apierrors.IsNotFound(err) {
return r.handleFailure(ctx, lu, createFailedState(lu, reasonObjectNotFound, err.Error()))
}
return ctrl.Result{}, err
}
hasKubernetesChanges, err := r.reconcileKubernetesResource(ctx, monitor)
if err != nil {
if apierrors.IsNotFound(err) {
return r.handleFailure(ctx, lu, createFailedState(lu, reasonObjectNotFound, err.Error()))
}
return ctrl.Result{}, err
}
hasTriggerQueueChanges, err := r.reconcileTriggerQueue(ctx, monitor)
if err != nil {
return ctrl.Result{}, err
}
if hasFileChanges || hasKubernetesChanges || hasTriggerQueueChanges {
monitor.hasChangesToSync = true
}
if monitor.hasChangesToSync {
status := r.maybeSync(ctx, lu, monitor)
if status.Failed != nil {
// Log any new failures.
isNew := lu.Status.Failed == nil || !apicmp.DeepEqual(lu.Status.Failed, status.Failed)
if isNew && r.shouldLogFailureReason(status.Failed) {
logger.Get(ctx).Infof("LiveUpdate %q %s: %v", lu.Name, status.Failed.Reason, status.Failed.Message)
}
}
if !apicmp.DeepEqual(lu.Status, status) {
update := lu.DeepCopy()
update.Status = status
err := r.client.Status().Update(ctx, update)
if err != nil {
return ctrl.Result{}, err
}
}
}
monitor.hasChangesToSync = false
return ctrl.Result{}, nil
}
func (r *Reconciler) shouldLogFailureReason(obj *v1alpha1.LiveUpdateStateFailed) bool {
// ObjectNotFound errors are normal before the Apply has created the KubernetesDiscovery object.
return obj.Reason != reasonObjectNotFound
}
// Check for some invalid states.
func (r *Reconciler) ensureSelectorValid(lu *v1alpha1.LiveUpdate) *v1alpha1.LiveUpdateStateFailed {
selector := lu.Spec.Selector.Kubernetes
if selector == nil {
return createFailedState(lu, "Invalid", "No valid selector")
}
if selector.DiscoveryName == "" {
return createFailedState(lu, "Invalid", "Kubernetes selector requires DiscoveryName")
}
return nil
}
// If the failure state has changed, log it and write it to the apiserver.
func (r *Reconciler) handleFailure(ctx context.Context, lu *v1alpha1.LiveUpdate, failed *v1alpha1.LiveUpdateStateFailed) (ctrl.Result, error) {
isNew := lu.Status.Failed == nil || !apicmp.DeepEqual(lu.Status.Failed, failed)
if !isNew {
return ctrl.Result{}, nil
}
if r.shouldLogFailureReason(failed) {
logger.Get(ctx).Infof("LiveUpdate %q %s: %v", lu.Name, failed.Reason, failed.Message)
}
update := lu.DeepCopy()
update.Status.Failed = failed
err := r.client.Status().Update(ctx, update)
return ctrl.Result{}, err
}
// Create the monitor that tracks a live update. If the live update
// spec changes, wipe out all accumulated state.
func (r *Reconciler) ensureMonitorExists(name string, obj *v1alpha1.LiveUpdate) *monitor {
spec := obj.Spec
m, ok := r.monitors[name]
if ok && apicmp.DeepEqual(obj.Spec, m.spec) {
return m
}
m = &monitor{
manifestName: obj.Annotations[v1alpha1.AnnotationManifest],
spec: spec,
sources: make(map[string]*monitorSource),
containers: make(map[monitorContainerKey]monitorContainerStatus),
}
r.monitors[name] = m
return m
}
// Consume all FileEvents off the FileWatch objects.
// Returns true if we saw new file events.
//
// TODO(nick): Currently, it's entirely possible to miss file events. This has
// always been true (since operating systems themselves put limits on the event
// queue). But it gets worse in a world where we read FileEvents from the API,
// since the FileWatch API itself adds lower limits.
//
// Long-term, we ought to have some way to reconnect/resync like other
// sync systems do (syncthing/rsync). e.g., diff the two file systems
// and update based on changes. But it also might make more sense to switch to a
// different library for syncing (e.g., Mutagen) now that live updates
// are decoupled from other file event-triggered tasks.
//
// In the meantime, Milas+Nick should figure out a way to handle this
// better in the short term.
func (r *Reconciler) reconcileSources(ctx context.Context, monitor *monitor) (bool, error) {
if len(monitor.spec.Sources) == 0 {
return false, nil
}
hasChange := false
for _, s := range monitor.spec.Sources {
oneChange, err := r.reconcileOneSource(ctx, monitor, s)
if err != nil {
return false, err
}
if oneChange {
hasChange = true
}
}
return hasChange, nil
}
// Consume one Source object.
func (r *Reconciler) reconcileOneSource(ctx context.Context, monitor *monitor, source v1alpha1.LiveUpdateSource) (bool, error) {
fwn := source.FileWatch
imn := source.ImageMap
var fw v1alpha1.FileWatch
if fwn != "" {
err := r.client.Get(ctx, types.NamespacedName{Name: fwn}, &fw)
if err != nil {
return false, err
}
}
var im v1alpha1.ImageMap
if imn != "" {
err := r.client.Get(ctx, types.NamespacedName{Name: imn}, &im)
if err != nil {
return false, err
}
}
events := fw.Status.FileEvents
if len(events) == 0 || fwn == "" {
return false, nil
}
mSource, ok := monitor.sources[fwn]
if !ok {
mSource = &monitorSource{
modTimeByPath: make(map[string]metav1.MicroTime),
}
monitor.sources[fwn] = mSource
}
newImageStatus := im.Status
imageChanged := false
if imn != "" {
imageChanged = mSource.lastImageStatus == nil ||
!apicmp.DeepEqual(&newImageStatus, mSource.lastImageStatus)
mSource.lastImageStatus = &im.Status
}
newLastFileEvent := events[len(events)-1]
event := mSource.lastFileEvent
fileWatchChanged := event == nil || !apicmp.DeepEqual(&newLastFileEvent, event)
mSource.lastFileEvent = &newLastFileEvent
if fileWatchChanged {
// Consume all the file events.
for _, event := range events {
eventTime := event.Time.Time
if newImageStatus.BuildStartTime != nil && newImageStatus.BuildStartTime.After(eventTime) {
continue
}
for _, f := range event.SeenFiles {
existing, ok := mSource.modTimeByPath[f]
if !ok || existing.Time.Before(event.Time.Time) {
mSource.modTimeByPath[f] = event.Time
}
}
}
}
return fileWatchChanged || imageChanged, nil
}
// Consume the TriggerQueue.
// This isn't formally represented in the API right now; it's just
// a ConfigMap to pull attributes off of.
// Returns true if we saw any changes.
func (r *Reconciler) reconcileTriggerQueue(ctx context.Context, monitor *monitor) (bool, error) {
|
queue, err := configmap.TriggerQueue(ctx, r.client)
if err != nil {
return false, client.IgnoreNotFound(err)
}
if monitor.lastTriggerQueue != nil && apicmp.DeepEqual(queue.Data, monitor.lastTriggerQueue.Data) {
return false, nil
}
monitor.lastTriggerQueue = queue
return true, nil
}
// Consume all objects off the KubernetesSelector.
// Returns true if we saw any changes to the objects we're watching.
func (r *Reconciler) reconcileKubernetesResource(ctx context.Context, monitor *monitor) (bool, error) {
selector := monitor.spec.Selector.Kubernetes
if selector == nil {
return false, nil
}
var kd *v1alpha1.KubernetesDiscovery
var ka *v1alpha1.KubernetesApply
changed := false
if selector.ApplyName != "" {
ka = &v1alpha1.KubernetesApply{}
err := r.client.Get(ctx, types.NamespacedName{Name: selector.ApplyName}, ka)
if err != nil {
return false, err
}
if monitor.lastKubernetesApplyStatus == nil ||
!apicmp.DeepEqual(monitor.lastKubernetesApplyStatus, &(ka.Status)) {
changed = true
}
}
kd = &v1alpha1.KubernetesDiscovery{}
err := r.client.Get(ctx, types.NamespacedName{Name: selector.DiscoveryName}, kd)
if err != nil {
return false, err
}
if monitor.lastKubernetesDiscovery == nil ||
!apicmp.DeepEqual(monitor.lastKubernetesDiscovery.Status, kd.Status) {
changed = true
}
if ka == nil {
monitor.lastKubernetesApplyStatus = nil
} else {
monitor.lastKubernetesApplyStatus = &(ka.Status)
}
monitor.lastKubernetesDiscovery = kd
return changed, nil
}
// Go through all the file changes, and delete files that aren't relevant
// to the current build.
//
// Determining the current build is a bit tricky, but our
// order of preference is:
// 1) If we have an ImageMap.BuildStartedAt, this is the gold standard.
// 2) If there's no ImageMap, we prefer the KubernetesApply.LastApplyStartTime.
// 3) If there's no KubernetesApply, we prefer the oldest pod
// in the filtered pod list.
func (r *Reconciler) garbageCollectFileChanges(res *k8sconv.KubernetesResource, monitor *monitor) {
for _, source := range monitor.spec.Sources {
fwn := source.FileWatch
mSource, ok := monitor.sources[fwn]
if !ok {
continue
}
lastImageStatus := mSource.lastImageStatus
var gcTime time.Time
if lastImageStatus != nil && lastImageStatus.BuildStartTime != nil {
gcTime = lastImageStatus.BuildStartTime.Time
} else if res.ApplyStatus != nil {
gcTime = res.ApplyStatus.LastApplyStartTime.Time
} else {
for _, pod := range res.FilteredPods {
if gcTime.IsZero() || (!pod.CreatedAt.IsZero() && pod.CreatedAt.Time.Before(gcTime)) {
gcTime = pod.CreatedAt.Time
}
}
}
if !gcTime.IsZero() {
// Delete all file events that happened before the
// latest build started.
for p, t := range mSource.modTimeByPath {
if gcTime.After(t.Time) {
delete(mSource.modTimeByPath, p)
}
}
// Delete all failures that happened before the
// latest build started.
//
// This mechanism isn't perfect - for example, it will start resyncing
// again to a container that's going to be replaced by the current
// build. But we also can't determine if a container is going to be
// replaced or not (particularly if the image didn't change).
for key, c := range monitor.containers {
if !c.failedLowWaterMark.IsZero() && gcTime.After(c.failedLowWaterMark.Time) {
c.failedLowWaterMark = metav1.MicroTime{}
c.failedReason = ""
c.failedMessage = ""
monitor.containers[key] = c
}
}
}
}
}
// Go through all the container monitors, and delete any that are no longer
// being selected. We don't care why they're not being selected.
func (r *Reconciler) garbageCollectMonitorContainers(res *k8sconv.KubernetesResource, monitor *monitor) {
podsByKey := map[monitorContainerKey]bool{}
for _, pod := range res.FilteredPods {
podsByKey[monitorContainerKey{podName: pod.Name, namespace: pod.Namespace}] = true
}
for key := range monitor.containers {
podKey := monitorContainerKey{podName: key.podName, namespace: key.namespace}
if !podsByKey[podKey] {
delete(monitor.containers, key)
}
}
}
// Visit all selected containers.
func (r *Reconciler) visitSelectedContainers(
kSelector *v1alpha1.LiveUpdateKubernetesSelector,
kResource *k8sconv.KubernetesResource,
visit func(pod v1alpha1.Pod, c v1alpha1.Container) bool) {
for _, pod := range kResource.FilteredPods {
for _, c := range pod.Containers {
if c.Name == "" {
// ignore any blatantly invalid containers
continue
}
// LiveUpdateKubernetesSelector must specify EITHER image OR container name
if kSelector.Image != "" {
imageRef, err := container.ParseNamed(c.Image)
if err != nil || imageRef == nil || kSelector.Image != reference.FamiliarName(imageRef) {
continue
}
} else if kSelector.ContainerName != c.Name {
continue
}
stop := visit(pod, c)
if stop {
return
}
}
}
}
func (r *Reconciler) dispatchStartBuildAction(ctx context.Context, lu *v1alpha1.LiveUpdate, filesChanged []string) {
manifestName := lu.Annotations[v1alpha1.AnnotationManifest]
spanID := lu.Annotations[v1alpha1.AnnotationSpanID]
r.store.Dispatch(buildcontrols.BuildStartedAction{
ManifestName: model.ManifestName(manifestName),
StartTime: time.Now(),
FilesChanged: filesChanged,
Reason: model.BuildReasonFlagChangedFiles,
SpanID: logstore.SpanID(spanID),
FullBuildTriggered: false,
})
buildcontrols.LogBuildEntry(ctx, buildcontrols.BuildEntry{
Name: model.ManifestName(manifestName),
BuildReason: model.BuildReasonFlagChangedFiles,
FilesChanged: filesChanged,
})
}
func (r *Reconciler) dispatchCompleteBuildAction(lu *v1alpha1.LiveUpdate, newStatus v1alpha1.LiveUpdateStatus) {
manifestName := model.ManifestName(lu.Annotations[v1alpha1.AnnotationManifest])
spanID := logstore.SpanID(lu.Annotations[v1alpha1.AnnotationSpanID])
var err error
if newStatus.Failed != nil {
err = fmt.Errorf("%s", newStatus.Failed.Message)
}
imageTargetID := model.TargetID{
Type: model.TargetTypeImage,
Name: model.TargetName(apis.SanitizeName(lu.Spec.Selector.Kubernetes.Image)),
}
containerIDs := []container.ID{}
for _, status := range newStatus.Containers {
if status.Waiting == nil {
containerIDs = append(containerIDs, container.ID(status.ContainerID))
}
}
result := store.NewLiveUpdateBuildResult(imageTargetID, containerIDs)
resultSet := store.BuildResultSet{imageTargetID: result}
r.store.Dispatch(buildcontrols.NewBuildCompleteAction(manifestName, spanID, resultSet, err))
}
// Convert the currently tracked state into a set of inputs
// to the updater, then apply them.
func (r *Reconciler) maybeSync(ctx context.Context, lu *v1alpha1.LiveUpdate, monitor *monitor) v1alpha1.LiveUpdateStatus {
var status v1alpha1.LiveUpdateStatus
kSelector := lu.Spec.Selector.Kubernetes
if kSelector == nil {
status.Failed = createFailedState(lu, "Invalid", "no valid selector")
return status
}
kResource, err := k8sconv.NewKubernetesResource(monitor.lastKubernetesDiscovery, monitor.lastKubernetesApplyStatus)
if err != nil {
status.Failed = createFailedState(lu, "KubernetesError", fmt.Sprintf("creating kube resource: %v", err))
return status
}
manifestName := lu.Annotations[v1alpha1.AnnotationManifest]
updateMode := lu.Annotations[liveupdate.AnnotationUpdateMode]
inTriggerQueue := monitor.lastTriggerQueue != nil && manifestName != "" &&
configmap.InTriggerQueue(monitor.lastTriggerQueue, types.NamespacedName{Name: manifestName})
isUpdateModeManual := updateMode == liveupdate.UpdateModeManual
isWaitingOnTrigger := false
if isUpdateModeManual && !inTriggerQueue {
// In manual mode, we should always wait for a trigger before live updating anything.
isWaitingOnTrigger = true
}
r.garbageCollectFileChanges(kResource, monitor)
r.garbageCollectMonitorContainers(kResource, monitor)
// Go through all the container monitors, and check if any of them are unrecoverable.
// If they are, it's not important to figure out why.
r.visitSelectedContainers(kSelector, kResource, func(pod v1alpha1.Pod, c v1alpha1.Container) bool {
cKey := monitorContainerKey{
containerID: c.ID,
podName: pod.Name,
namespace: pod.Namespace,
}
cStatus, ok := monitor.containers[cKey]
if ok && cStatus.failedReason != "" {
status.Failed = createFailedState(lu, cStatus.failedReason, cStatus.failedMessage)
return true
}
return false
})
if status.Failed != nil {
return status
}
updateEventDispatched := false
// Visit all containers, apply changes, and return their statuses.
terminatedContainerPodName := ""
hasAnyFilesToSync := false
r.visitSelectedContainers(kSelector, kResource, func(pod v1alpha1.Pod, cInfo v1alpha1.Container) bool {
c := liveupdates.Container{
ContainerID: container.ID(cInfo.ID),
ContainerName: container.Name(cInfo.Name),
PodID: k8s.PodID(pod.Name),
Namespace: k8s.Namespace(pod.Namespace),
}
cKey := monitorContainerKey{
containerID: cInfo.ID,
podName: pod.Name,
namespace: pod.Namespace,
}
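// The high-water mark is the modification time of the newest file already synced to this container;
// only files changed after it still need to be applied. For containers we haven't synced yet,
// default to the reconciler's start time.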
highWaterMark := r.startedTime
cStatus, ok := monitor.containers[cKey]
if ok && !cStatus.lastFileTimeSynced.IsZero() {
highWaterMark = cStatus.lastFileTimeSynced
}
// Determine the changed files.
filesChanged := []string{}
newHighWaterMark := highWaterMark
newLowWaterMark := metav1.MicroTime{}
for _, source := range monitor.sources {
for f, t := range source.modTimeByPath {
if t.After(highWaterMark.Time) {
filesChanged = append(filesChanged, f)
if newLowWaterMark.IsZero() || t.Before(&newLowWaterMark) {
newLowWaterMark = t
}
if t.After(newHighWaterMark.Time) {
newHighWaterMark = t
}
}
}
}
// Sort the files so that they're deterministic.
filesChanged = sliceutils.DedupedAndSorted(filesChanged)
if len(filesChanged) > 0 {
hasAnyFilesToSync = true
}
// Ignore completed pods/containers.
// This is a bit tricky to handle correctly, but is handled at
// the end of this function.
if pod.Phase == string(v1.PodSucceeded) || pod.Phase == string(v1.PodFailed) || cInfo.State.Terminated != nil {
if terminatedContainerPodName == "" {
terminatedContainerPodName = pod.Name
}
return false
}
var waiting *v1alpha1.LiveUpdateContainerStateWaiting
// We interpret "no container id" as a waiting state
// (terminated states should have been caught above).
if cInfo.State.Running == nil || cInfo.ID == "" {
waiting = &v1alpha1.LiveUpdateContainerStateWaiting{
Reason: "ContainerWaiting",
Message: "Waiting for container to start",
}
} else if isWaitingOnTrigger {
waiting = &v1alpha1.LiveUpdateContainerStateWaiting{
Reason: "Trigger",
Message: "Only updates on manual trigger",
}
}
// Create a plan to update the container.
filesApplied := false
var oneUpdateStatus v1alpha1.LiveUpdateStatus
plan, failed := r.createLiveUpdatePlan(lu.Spec, filesChanged)
if failed != nil {
// The plan told us to stop updating - this container is unrecoverable.
oneUpdateStatus.Failed = failed
} else if len(plan.SyncPaths) == 0 {
// The plan told us that there are no updates to do.
oneUpdateStatus.Containers = []v1alpha1.LiveUpdateContainerStatus{{
ContainerName: cInfo.Name,
ContainerID: cInfo.ID,
PodName: pod.Name,
Namespace: pod.Namespace,
LastFileTimeSynced: cStatus.lastFileTimeSynced,
Waiting: waiting,
}}
} else if cInfo.State.Waiting != nil && cInfo.State.Waiting.Reason == "CrashLoopBackOff" {
// At this point, the plan told us that we have some files to sync.
// Check if the container is in a state to receive those updates.
// If the container is crashlooping, that means it might not be up long enough
// to be able to receive a live-update. Treat this as an unrecoverable failure case.
oneUpdateStatus.Failed = createFailedState(lu, "CrashLoopBackOff",
fmt.Sprintf("Cannot live update because container crashing. Pod: %s", pod.Name))
} else if waiting != nil {
// Mark the container as waiting, so we have a record of it. No need to sync any files.
oneUpdateStatus.Containers = []v1alpha1.LiveUpdateContainerStatus{{
ContainerName: cInfo.Name,
ContainerID: cInfo.ID,
PodName: pod.Name,
Namespace: pod.Namespace,
LastFileTimeSynced: cStatus.lastFileTimeSynced,
Waiting: waiting,
}}
} else {
// Log progress and treat this as an update in the engine state.
if !updateEventDispatched {
updateEventDispatched = true
r.dispatchStartBuildAction(ctx, lu, filesChanged)
}
// Apply the change to the container.
oneUpdateStatus = r.applyInternal(ctx, lu.Spec, Input{
IsDC: false, // update this once we support DockerCompose in the API.
ChangedFiles: plan.SyncPaths,
Containers: []liveupdates.Container{c},
LastFileTimeSynced: newHighWaterMark,
})
filesApplied = true
}
// Merge the status from the single update into the overall liveupdate status.
adjustFailedStateTimestamps(lu, &oneUpdateStatus)
// Update the monitor based on the result of the applied changes.
if oneUpdateStatus.Failed != nil {
cStatus.failedReason = oneUpdateStatus.Failed.Reason
cStatus.failedMessage = oneUpdateStatus.Failed.Message
cStatus.failedLowWaterMark = newLowWaterMark
} else if filesApplied {
cStatus.lastFileTimeSynced = newHighWaterMark
}
monitor.containers[cKey] = cStatus
// Update the status based on the result of the applied changes.
if oneUpdateStatus.Failed != nil {
status.Failed = oneUpdateStatus.Failed
status.Containers = nil
return true
}
status.Containers = append(status.Containers, oneUpdateStatus.Containers...)
return false
})
// If the only containers we're connected to are terminated containers,
// there are two cases we need to worry about:
//
// 1) The pod has completed, and will never run again (like a Job).
// 2) This is an old pod, and we're waiting for the new pod to rollout.
//
// We don't really have a great way to distinguish between these two cases.
//
// If we get to the end of this loop and haven't found any "live" pods,
// we assume we're in state (1) (to prevent waiting forever).
if status.Failed == nil && terminatedContainerPodName != "" &&
hasAnyFilesToSync && len(status.Containers) == 0 {
status.Failed = createFailedState(lu, "Terminated",
fmt.Sprintf("Container for live update is stopped. Pod name: %s", terminatedContainerPodName))
}
if updateEventDispatched {
r.dispatchCompleteBuildAction(lu, status)
}
return status
}
func (r *Reconciler) createLiveUpdatePlan(spec v1alpha1.LiveUpdateSpec, filesChanged []string) (liveupdates.LiveUpdatePlan, *v1alpha1.LiveUpdateStateFailed) {
plan, err := liveupdates.NewLiveUpdatePlan(spec, filesChanged)
if err != nil {
return plan, &v1alpha1.LiveUpdateStateFailed{
Reason: "UpdateStopped",
Message: fmt.Sprintf("No update plan: %v", err),
}
}
if len(plan.NoMatchPaths) > 0 {
return plan, &v1alpha1.LiveUpdateStateFailed{
Reason: "UpdateStopped",
Message: fmt.Sprintf("Found file(s) not matching any sync (files: %s)",
ospath.FormatFileChangeList(plan.NoMatchPaths)),
}
}
// If any changed files match a FallBackOn file, fall back to next BuildAndDeployer
if len(plan.StopPaths) != 0 {
return plan, &v1alpha1.LiveUpdateStateFailed{
Reason: "UpdateStopped",
Message: fmt.Sprintf("Detected change to stop file %q", plan.StopPaths[0]),
}
}
return plan, nil
}
// Generate the correct transition time on the Failed state.
func adjustFailedStateTimestamps(obj *v1alpha1.LiveUpdate, newStatus *v1alpha1.LiveUpdateStatus) {
if newStatus.Failed == nil {
return
}
newStatus.Failed = createFailedState(obj, newStatus.Failed.Reason, newStatus.Failed.Message)
}
// Create a new failed state and update the transition timestamp if appropriate.
func createFailedState(obj *v1alpha1.LiveUpdate, reason, msg string) *v1alpha1.LiveUpdateStateFailed {
failed := &v1alpha1.LiveUpdateStateFailed{Reason: reason, Message: msg}
transitionTime := apis.NowMicro()
if obj.Status.Failed != nil && obj.Status.Failed.Reason == failed.Reason {
// If the reason hasn't changed, don't treat this as a transition.
transitionTime = obj.Status.Failed.LastTransitionTime
}
failed.LastTransitionTime = transitionTime
return failed
}
// Live-update containers by copying files and running exec commands.
//
// Update the apiserver when finished.
//
// We expose this as a public method as a hack! Currently, in Tilt, BuildController
// decides when to kick off the live update, and run a full image build+deploy if it
// fails. Eventually we'll invert that relationship, so that BuildController
// (and other API reconcilers) watch the live update API.
func (r *Reconciler) ForceApply(
ctx context.Context,
nn types.NamespacedName,
spec v1alpha1.LiveUpdateSpec,
input Input) (v1alpha1.LiveUpdateStatus, error) {
var obj v1alpha1.LiveUpdate
err := r.client.Get(ctx, nn, &obj)
if err != nil {
return v1alpha1.LiveUpdateStatus{}, err
}
status := r.applyInternal(ctx, spec, input)
adjustFailedStateTimestamps(&obj, &status)
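// Only write the status back to the apiserver when it actually changed.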
if !apicmp.DeepEqual(status, obj.Status) {
update := obj.DeepCopy()
update.Status = status
err := r.client.Status().Update(ctx, update)
if err != nil {
return v1alpha1.LiveUpdateStatus{}, err
}
}
return status, nil
}
// Like apply, but doesn't write the status to the apiserver.
func (r *Reconciler) applyInternal(
ctx context.Context,
spec v1alpha1.LiveUpdateSpec,
input Input) v1alpha1.LiveUpdateStatus {
var result v1alpha1.LiveUpdateStatus
cu := r.containerUpdater(input)
l := logger.Get(ctx)
containers := input.Containers
names := liveupdates.ContainerDisplayNames(containers)
suffix := ""
if len(containers) != 1 {
suffix = "(s)"
}
runSteps := liveupdate.RunSteps(spec)
changedFiles := input.ChangedFiles
hotReload := !liveupdate.ShouldRestart(spec)
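// Figure out which Run steps should fire for these changed files (run triggers are matched against the changed paths).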
boiledSteps, err := build.BoilRuns(runSteps, changedFiles)
if err != nil {
result.Failed = &v1alpha1.LiveUpdateStateFailed{
Reason: "Invalid",
Message: fmt.Sprintf("Building exec: %v", err),
}
return result
}
// Split the changed files: paths deleted locally must be removed from the container; the rest are archived and copied in.
toRemove, toArchive, err := build.MissingLocalPaths(ctx, changedFiles)
if err != nil {
result.Failed = &v1alpha1.LiveUpdateStateFailed{
Reason: "Invalid",
Message: fmt.Sprintf("Mapping paths: %v", err),
}
return result
}
if len(toRemove) > 0 {
l.Infof("Will delete %d file(s) from container%s: %s", len(toRemove), suffix, names)
for _, pm := range toRemove {
l.Infof("- '%s' (matched local path: '%s')", pm.ContainerPath, pm.LocalPath)
}
}
if len(toArchive) > 0 {
l.Infof("Will copy %d file(s) to container%s: %s", len(toArchive), suffix, names)
for _, pm := range toArchive {
l.Infof("- %s", pm.PrettyStr())
}
}
var lastExecErrorStatus *v1alpha1.LiveUpdateContainerStatus
for _, cInfo := range containers {
// TODO(nick): We should try to distinguish between cases where the tar writer
// fails (which is recoverable) vs when the server-side unpacking
// fails (which may not be recoverable).
archive := build.TarArchiveForPaths(ctx, toArchive, nil)
err = cu.UpdateContainer(ctx, cInfo, archive,
build.PathMappingsToContainerPaths(toRemove), boiledSteps, hotReload)
_ = archive.Close()
lastFileTimeSynced := input.LastFileTimeSynced
if lastFileTimeSynced.IsZero() {
lastFileTimeSynced = apis.NowMicro()
}
cStatus := v1alpha1.LiveUpdateContainerStatus{
ContainerName: cInfo.ContainerName.String(),
ContainerID: cInfo.ContainerID.String(),
PodName: cInfo.PodID.String(),
Namespace: cInfo.Namespace.String(),
LastFileTimeSynced: lastFileTimeSynced,
}
if err != nil {
if runFail, ok := build.MaybeRunStepFailure(err); ok {
// Keep running updates -- we want all containers to have the same files on them
// even if the Runs don't succeed
logger.Get(ctx).Infof(" → Failed to update container %s: run step %q failed with exit code: %d",
cInfo.DisplayName(), runFail.Cmd.String(), runFail.ExitCode)
cStatus.LastExecError = err.Error()
lastExecErrorStatus = &cStatus
} else {
// Something went wrong with this update and it's NOT the user's fault--
// likely an infrastructure error. Bail, and fall back to full build.
result.Failed = &v1alpha1.LiveUpdateStateFailed{
Reason: "UpdateFailed",
Message: fmt.Sprintf("Updating pod %s: %v", cStatus.PodName, err),
}
return result
}
} else {
logger.Get(ctx).Infof(" → Container %s updated!", cInfo.DisplayName())
if lastExecErrorStatus != nil {
// This build succeeded, but previously at least one failed due to user error.
// We may have inconsistent state--bail, and fall back to full build.
result.Failed = &v1alpha1.LiveUpdateStateFailed{
Reason: "PodsInconsistent",
Message: fmt.Sprintf("Pods in inconsistent state. Success: pod %s. Failure: pod %s. Error: %v",
cStatus.PodName, lastExecErrorStatus.PodName, lastExecErrorStatus.LastExecError),
}
return result
}
}
result.Containers = append(result.Containers, cStatus)
}
return result
}
func (r *Reconciler) containerUpdater(input Input) containerupdate.ContainerUpdater {
isDC := input.IsDC
if isDC || r.updateMode == liveupdates.UpdateModeContainer {
return r.DockerUpdater
}
if r.updateMode == liveupdates.UpdateModeKubectlExec {
return r.ExecUpdater
}
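// Otherwise, use the Docker updater when it will build directly into the current kube context
// (e.g. a local cluster); fall back to exec-based updates in all other cases.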
dcu, ok := r.DockerUpdater.(*containerupdate.DockerUpdater)
if ok && dcu.WillBuildToKubeContext(r.kubeContext) {
return r.DockerUpdater
}
return r.ExecUpdater
}
func (r *Reconciler) CreateBuilder(mgr ctrl.Manager) (*builder.Builder, error) {
b := ctrl.NewControllerManagedBy(mgr).
For(&v1alpha1.LiveUpdate{}).
Watches(&source.Kind{Type: &v1alpha1.KubernetesDiscovery{}},
handler.EnqueueRequestsFromMapFunc(r.indexer.Enqueue)).
Watches(&source.Kind{Type: &v1alpha1.KubernetesApply{}},
handler.EnqueueRequestsFromMapFunc(r.indexer.Enqueue)).
Watches(&source.Kind{Type: &v1alpha1.FileWatch{}},
handler.EnqueueRequestsFromMapFunc(r.indexer.Enqueue)).
Watches(&source.Kind{Type: &v1alpha1.ImageMap{}},
handler.EnqueueRequestsFromMapFunc(r.indexer.Enqueue)).
Watches(&source.Kind{Type: &v1alpha1.ConfigMap{}},
handler.EnqueueRequestsFromMapFunc(r.enqueueTriggerQueue))
return b, nil
}
// Find any objects we need to reconcile based on the trigger queue.
func (r *Reconciler) enqueueTriggerQueue(obj client.Object) []reconcile.Request {
cm, ok := obj.(*v1alpha1.ConfigMap)
if !ok {
return nil
}
if cm.Name != configmap.TriggerQueueName {
return nil
}
// We can only trigger liveupdates that have run once, so search
// through the map of known liveupdates
names := configmap.NamesInTriggerQueue(cm)
nameSet := make(map[string]bool)
for _, name := range names {
nameSet[name] = true
}
r.mu.Lock()
defer r.mu.Unlock()
requests := []reconcile.Request{}
for name, monitor := range r.monitors {
if nameSet[monitor.manifestName] {
requests = append(requests, reconcile.Request{NamespacedName: types.NamespacedName{Name: name}})
}
}
return requests
}
// indexLiveUpdate returns keys of objects referenced _by_ the LiveUpdate object for reverse lookup including:
// - FileWatch
// - ImageMapName
// - KubernetesDiscovery
// - KubernetesApply
func indexLiveUpdate(obj ctrlclient.Object) []indexer.Key {
lu := obj.(*v1alpha1.LiveUpdate)
var result []indexer.Key
for _, s := range lu.Spec.Sources {
fwn := s.FileWatch
imn := s.ImageMap
if fwn != "" {
result = append(result, indexer.Key{
Name: types.NamespacedName{
Namespace: lu.Namespace,
Name: fwn,
},
GVK: fwGVK,
})
}
if imn != "" {
result = append(result, indexer.Key{
Name: types.NamespacedName{
Namespace: lu.Namespace,
Name: imn,
},
GVK: imageMapGVK,
})
}
}
if lu.Spec.Selector.Kubernetes != nil {
if lu.Spec.Selector.Kubernetes.DiscoveryName != "" {
result = append(result, indexer.Key{
Name: types.NamespacedName{
Namespace: lu.Namespace,
Name: lu.Spec.Selector.Kubernetes.DiscoveryName,
},
GVK: discoveryGVK,
})
}
if lu.Spec.Selector.Kubernetes.ApplyName != "" {
result = append(result, indexer.Key{
Name: types.NamespacedName{
Namespace: lu.Namespace,
Name: lu.Spec.Selector.Kubernetes.ApplyName,
},
GVK: applyGVK,
})
}
}
return result
}
| |
index.ts
|
import { expose } from '@chainlink/ea-bootstrap'
import { makeExecute } from './adapter'
import { makeConfig } from './config'
|
export = { makeExecute, makeConfig, ...expose(makeExecute()) }
|
|
rpc_psbt.py
|
#!/usr/bin/env python3
# Copyright (c) 2018-2020 The worldwideweb Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the Partially Signed Transaction RPCs.
"""
from decimal import Decimal
from itertools import product
from test_framework.descriptors import descsum_create
from test_framework.key import ECKey
from test_framework.test_framework import worldwidewebTestFramework
from test_framework.util import (
assert_approx,
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
find_output,
)
from test_framework.wallet_util import bytes_to_wif
import json
import os
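# Highest nSequence value that still signals opt-in replace-by-fee under BIP 125.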
MAX_BIP125_RBF_SEQUENCE = 0xfffffffd
# Create one-input, one-output, no-fee transaction:
class PSBTTest(worldwidewebTestFramework):
def set_test_params(self):
self.num_nodes = 3
self.extra_args = [
["-walletrbf=1"],
["-walletrbf=0", "-changetype=legacy"],
[]
]
self.supports_cli = False
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
# TODO: Re-enable this test with segwit v1
def
|
(self):
mining_node = self.nodes[2]
offline_node = self.nodes[0]
online_node = self.nodes[1]
# Disconnect offline node from others
# Topology of test network is linear, so this one call is enough
self.disconnect_nodes(0, 1)
# Create watchonly on online_node
online_node.createwallet(wallet_name='wonline', disable_private_keys=True)
wonline = online_node.get_wallet_rpc('wonline')
w2 = online_node.get_wallet_rpc('')
# Mine a transaction that credits the offline address
offline_addr = offline_node.getnewaddress(address_type="p2sh-segwit")
online_addr = w2.getnewaddress(address_type="p2sh-segwit")
wonline.importaddress(offline_addr, "", False)
mining_node.sendtoaddress(address=offline_addr, amount=1.0)
self.generate(mining_node, nblocks=1)
self.sync_blocks([mining_node, online_node])
# Construct an unsigned PSBT on the online node (who doesn't know the output is Segwit, so will include a non-witness UTXO)
utxos = wonline.listunspent(addresses=[offline_addr])
raw = wonline.createrawtransaction([{"txid":utxos[0]["txid"], "vout":utxos[0]["vout"]}],[{online_addr:0.9999}])
psbt = wonline.walletprocesspsbt(online_node.converttopsbt(raw))["psbt"]
assert "non_witness_utxo" in mining_node.decodepsbt(psbt)["inputs"][0]
# Have the offline node sign the PSBT (which will update the UTXO to segwit)
signed_psbt = offline_node.walletprocesspsbt(psbt)["psbt"]
assert "witness_utxo" in mining_node.decodepsbt(signed_psbt)["inputs"][0]
# Make sure we can mine the resulting transaction
txid = mining_node.sendrawtransaction(mining_node.finalizepsbt(signed_psbt)["hex"])
self.generate(mining_node, 1)
self.sync_blocks([mining_node, online_node])
assert_equal(online_node.gettxout(txid,0)["confirmations"], 1)
wonline.unloadwallet()
# Reconnect
self.connect_nodes(0, 1)
self.connect_nodes(0, 2)
def assert_change_type(self, psbtx, expected_type):
"""Assert that the given PSBT has a change output with the given type."""
# The decodepsbt RPC is stateless and independent of any settings, we can always just call it on the first node
decoded_psbt = self.nodes[0].decodepsbt(psbtx["psbt"])
changepos = psbtx["changepos"]
assert_equal(decoded_psbt["tx"]["vout"][changepos]["scriptPubKey"]["type"], expected_type)
def run_test(self):
# Create and fund a raw tx for sending 10 BTC
psbtx1 = self.nodes[0].walletcreatefundedpsbt([], {self.nodes[2].getnewaddress():10})['psbt']
# If inputs are specified, do not automatically add more:
utxo1 = self.nodes[0].listunspent()[0]
assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[0].walletcreatefundedpsbt, [{"txid": utxo1['txid'], "vout": utxo1['vout']}], {self.nodes[2].getnewaddress():90})
psbtx1 = self.nodes[0].walletcreatefundedpsbt([{"txid": utxo1['txid'], "vout": utxo1['vout']}], {self.nodes[2].getnewaddress():90}, 0, {"add_inputs": True})['psbt']
assert_equal(len(self.nodes[0].decodepsbt(psbtx1)['tx']['vin']), 2)
# Inputs argument can be null
self.nodes[0].walletcreatefundedpsbt(None, {self.nodes[2].getnewaddress():10})
# Node 1 should not be able to add anything to it, but should still return the psbt unchanged
psbtx = self.nodes[1].walletprocesspsbt(psbtx1)['psbt']
assert_equal(psbtx1, psbtx)
# Node 0 should not be able to sign the transaction while the wallet is locked
self.nodes[0].encryptwallet("password")
assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].walletprocesspsbt, psbtx)
# Node 0 should be able to process without signing though
unsigned_tx = self.nodes[0].walletprocesspsbt(psbtx, False)
assert_equal(unsigned_tx['complete'], False)
self.nodes[0].walletpassphrase(passphrase="password", timeout=1000000)
# Sign the transaction and send
signed_tx = self.nodes[0].walletprocesspsbt(psbtx)['psbt']
final_tx = self.nodes[0].finalizepsbt(signed_tx)['hex']
self.nodes[0].sendrawtransaction(final_tx)
# Manually selected inputs can be locked:
assert_equal(len(self.nodes[0].listlockunspent()), 0)
utxo1 = self.nodes[0].listunspent()[0]
psbtx1 = self.nodes[0].walletcreatefundedpsbt([{"txid": utxo1['txid'], "vout": utxo1['vout']}], {self.nodes[2].getnewaddress():1}, 0,{"lockUnspents": True})["psbt"]
assert_equal(len(self.nodes[0].listlockunspent()), 1)
# Locks are ignored for manually selected inputs
self.nodes[0].walletcreatefundedpsbt([{"txid": utxo1['txid'], "vout": utxo1['vout']}], {self.nodes[2].getnewaddress():1}, 0)
# Create p2sh, p2wpkh, and p2wsh addresses
pubkey0 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())['pubkey']
pubkey1 = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress())['pubkey']
pubkey2 = self.nodes[2].getaddressinfo(self.nodes[2].getnewaddress())['pubkey']
# Setup watchonly wallets
self.nodes[2].createwallet(wallet_name='wmulti', disable_private_keys=True)
wmulti = self.nodes[2].get_wallet_rpc('wmulti')
# Create all the addresses
p2sh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "legacy")['address']
p2wsh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "bech32")['address']
p2sh_p2wsh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "p2sh-segwit")['address']
if not self.options.descriptors:
wmulti.importaddress(p2sh)
wmulti.importaddress(p2wsh)
wmulti.importaddress(p2sh_p2wsh)
p2wpkh = self.nodes[1].getnewaddress("", "bech32")
p2pkh = self.nodes[1].getnewaddress("", "legacy")
p2sh_p2wpkh = self.nodes[1].getnewaddress("", "p2sh-segwit")
# fund those addresses
rawtx = self.nodes[0].createrawtransaction([], {p2sh:10, p2wsh:10, p2wpkh:10, p2sh_p2wsh:10, p2sh_p2wpkh:10, p2pkh:10})
rawtx = self.nodes[0].fundrawtransaction(rawtx, {"changePosition":3})
signed_tx = self.nodes[0].signrawtransactionwithwallet(rawtx['hex'])['hex']
txid = self.nodes[0].sendrawtransaction(signed_tx)
self.generate(self.nodes[0], 6)
self.sync_all()
# Find the output pos
p2sh_pos = -1
p2wsh_pos = -1
p2wpkh_pos = -1
p2pkh_pos = -1
p2sh_p2wsh_pos = -1
p2sh_p2wpkh_pos = -1
decoded = self.nodes[0].decoderawtransaction(signed_tx)
for out in decoded['vout']:
if out['scriptPubKey']['address'] == p2sh:
p2sh_pos = out['n']
elif out['scriptPubKey']['address'] == p2wsh:
p2wsh_pos = out['n']
elif out['scriptPubKey']['address'] == p2wpkh:
p2wpkh_pos = out['n']
elif out['scriptPubKey']['address'] == p2sh_p2wsh:
p2sh_p2wsh_pos = out['n']
elif out['scriptPubKey']['address'] == p2sh_p2wpkh:
p2sh_p2wpkh_pos = out['n']
elif out['scriptPubKey']['address'] == p2pkh:
p2pkh_pos = out['n']
inputs = [{"txid": txid, "vout": p2wpkh_pos}, {"txid": txid, "vout": p2sh_p2wpkh_pos}, {"txid": txid, "vout": p2pkh_pos}]
outputs = [{self.nodes[1].getnewaddress(): 29.99}]
# spend single key from node 1
created_psbt = self.nodes[1].walletcreatefundedpsbt(inputs, outputs)
walletprocesspsbt_out = self.nodes[1].walletprocesspsbt(created_psbt['psbt'])
# Make sure it has both types of UTXOs
decoded = self.nodes[1].decodepsbt(walletprocesspsbt_out['psbt'])
assert 'non_witness_utxo' in decoded['inputs'][0]
assert 'witness_utxo' in decoded['inputs'][0]
# Check decodepsbt fee calculation (input values shall only be counted once per UTXO)
assert_equal(decoded['fee'], created_psbt['fee'])
assert_equal(walletprocesspsbt_out['complete'], True)
self.nodes[1].sendrawtransaction(self.nodes[1].finalizepsbt(walletprocesspsbt_out['psbt'])['hex'])
self.log.info("Test walletcreatefundedpsbt fee rate of 10000 sat/vB and 0.1 BTC/kvB produces a total fee at or slightly below -maxtxfee (~0.05290000)")
res1 = self.nodes[1].walletcreatefundedpsbt(inputs, outputs, 0, {"fee_rate": 10000, "add_inputs": True})
assert_approx(res1["fee"], 0.055, 0.005)
res2 = self.nodes[1].walletcreatefundedpsbt(inputs, outputs, 0, {"feeRate": "0.1", "add_inputs": True})
assert_approx(res2["fee"], 0.055, 0.005)
self.log.info("Test min fee rate checks with walletcreatefundedpsbt are bypassed, e.g. a fee_rate under 1 sat/vB is allowed")
res3 = self.nodes[1].walletcreatefundedpsbt(inputs, outputs, 0, {"fee_rate": "0.999", "add_inputs": True})
assert_approx(res3["fee"], 0.00000381, 0.0000001)
res4 = self.nodes[1].walletcreatefundedpsbt(inputs, outputs, 0, {"feeRate": 0.00000999, "add_inputs": True})
assert_approx(res4["fee"], 0.00000381, 0.0000001)
self.log.info("Test min fee rate checks with walletcreatefundedpsbt are bypassed and that funding non-standard 'zero-fee' transactions is valid")
for param, zero_value in product(["fee_rate", "feeRate"], [0, 0.000, 0.00000000, "0", "0.000", "0.00000000"]):
assert_equal(0, self.nodes[1].walletcreatefundedpsbt(inputs, outputs, 0, {param: zero_value, "add_inputs": True})["fee"])
self.log.info("Test invalid fee rate settings")
for param, value in {("fee_rate", 100000), ("feeRate", 1)}:
assert_raises_rpc_error(-4, "Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)",
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {param: value, "add_inputs": True})
assert_raises_rpc_error(-3, "Amount out of range",
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {param: -1, "add_inputs": True})
assert_raises_rpc_error(-3, "Amount is not a number or string",
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {param: {"foo": "bar"}, "add_inputs": True})
# Test fee rate values that don't pass fixed-point parsing checks.
for invalid_value in ["", 0.000000001, 1e-09, 1.111111111, 1111111111111111, "31.999999999999999999999"]:
assert_raises_rpc_error(-3, "Invalid amount",
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {param: invalid_value, "add_inputs": True})
# Test fee_rate values that cannot be represented in sat/vB.
for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999, "0.0001", "0.00000001", "0.00099999", "31.99999999"]:
assert_raises_rpc_error(-3, "Invalid amount",
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {"fee_rate": invalid_value, "add_inputs": True})
self.log.info("- raises RPC error if both feeRate and fee_rate are passed")
assert_raises_rpc_error(-8, "Cannot specify both fee_rate (sat/vB) and feeRate (BTC/kvB)",
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {"fee_rate": 0.1, "feeRate": 0.1, "add_inputs": True})
self.log.info("- raises RPC error if both feeRate and estimate_mode passed")
assert_raises_rpc_error(-8, "Cannot specify both estimate_mode and feeRate",
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {"estimate_mode": "economical", "feeRate": 0.1, "add_inputs": True})
for param in ["feeRate", "fee_rate"]:
self.log.info("- raises RPC error if both {} and conf_target are passed".format(param))
assert_raises_rpc_error(-8, "Cannot specify both conf_target and {}. Please provide either a confirmation "
"target in blocks for automatic fee estimation, or an explicit fee rate.".format(param),
self.nodes[1].walletcreatefundedpsbt ,inputs, outputs, 0, {param: 1, "conf_target": 1, "add_inputs": True})
self.log.info("- raises RPC error if both fee_rate and estimate_mode are passed")
assert_raises_rpc_error(-8, "Cannot specify both estimate_mode and fee_rate",
self.nodes[1].walletcreatefundedpsbt ,inputs, outputs, 0, {"fee_rate": 1, "estimate_mode": "economical", "add_inputs": True})
self.log.info("- raises RPC error with invalid estimate_mode settings")
for k, v in {"number": 42, "object": {"foo": "bar"}}.items():
assert_raises_rpc_error(-3, "Expected type string for estimate_mode, got {}".format(k),
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {"estimate_mode": v, "conf_target": 0.1, "add_inputs": True})
for mode in ["", "foo", Decimal("3.141592")]:
assert_raises_rpc_error(-8, 'Invalid estimate_mode parameter, must be one of: "unset", "economical", "conservative"',
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {"estimate_mode": mode, "conf_target": 0.1, "add_inputs": True})
self.log.info("- raises RPC error with invalid conf_target settings")
for mode in ["unset", "economical", "conservative"]:
self.log.debug("{}".format(mode))
for k, v in {"string": "", "object": {"foo": "bar"}}.items():
assert_raises_rpc_error(-3, "Expected type number for conf_target, got {}".format(k),
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {"estimate_mode": mode, "conf_target": v, "add_inputs": True})
for n in [-1, 0, 1009]:
assert_raises_rpc_error(-8, "Invalid conf_target, must be between 1 and 1008", # max value of 1008 per src/policy/fees.h
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {"estimate_mode": mode, "conf_target": n, "add_inputs": True})
self.log.info("Test walletcreatefundedpsbt with too-high fee rate produces total fee well above -maxtxfee and raises RPC error")
# previously this was silently capped at -maxtxfee
for bool_add, outputs_array in {True: outputs, False: [{self.nodes[1].getnewaddress(): 1}]}.items():
msg = "Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)"
assert_raises_rpc_error(-4, msg, self.nodes[1].walletcreatefundedpsbt, inputs, outputs_array, 0, {"fee_rate": 1000000, "add_inputs": bool_add})
assert_raises_rpc_error(-4, msg, self.nodes[1].walletcreatefundedpsbt, inputs, outputs_array, 0, {"feeRate": 1, "add_inputs": bool_add})
self.log.info("Test various PSBT operations")
# partially sign multisig things with node 1
psbtx = wmulti.walletcreatefundedpsbt(inputs=[{"txid":txid,"vout":p2wsh_pos},{"txid":txid,"vout":p2sh_pos},{"txid":txid,"vout":p2sh_p2wsh_pos}], outputs={self.nodes[1].getnewaddress():29.99}, options={'changeAddress': self.nodes[1].getrawchangeaddress()})['psbt']
walletprocesspsbt_out = self.nodes[1].walletprocesspsbt(psbtx)
psbtx = walletprocesspsbt_out['psbt']
assert_equal(walletprocesspsbt_out['complete'], False)
# Unload wmulti, we don't need it anymore
wmulti.unloadwallet()
# partially sign with node 2. This should be complete and sendable
walletprocesspsbt_out = self.nodes[2].walletprocesspsbt(psbtx)
assert_equal(walletprocesspsbt_out['complete'], True)
self.nodes[2].sendrawtransaction(self.nodes[2].finalizepsbt(walletprocesspsbt_out['psbt'])['hex'])
# check that walletprocesspsbt fails to decode a non-psbt
rawtx = self.nodes[1].createrawtransaction([{"txid":txid,"vout":p2wpkh_pos}], {self.nodes[1].getnewaddress():9.99})
assert_raises_rpc_error(-22, "TX decode failed", self.nodes[1].walletprocesspsbt, rawtx)
# Convert a non-psbt to psbt and make sure we can decode it
rawtx = self.nodes[0].createrawtransaction([], {self.nodes[1].getnewaddress():10})
rawtx = self.nodes[0].fundrawtransaction(rawtx)
new_psbt = self.nodes[0].converttopsbt(rawtx['hex'])
self.nodes[0].decodepsbt(new_psbt)
# Make sure that a non-psbt with signatures cannot be converted
# Error could be either "TX decode failed" (segwit inputs causes parsing to fail) or "Inputs must not have scriptSigs and scriptWitnesses"
# We must set iswitness=True because the serialized transaction has inputs and is therefore a witness transaction
signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx['hex'])
assert_raises_rpc_error(-22, "", self.nodes[0].converttopsbt, hexstring=signedtx['hex'], iswitness=True)
assert_raises_rpc_error(-22, "", self.nodes[0].converttopsbt, hexstring=signedtx['hex'], permitsigdata=False, iswitness=True)
# Unless we allow it to convert and strip signatures
self.nodes[0].converttopsbt(signedtx['hex'], True)
# Explicitly allow converting non-empty txs
new_psbt = self.nodes[0].converttopsbt(rawtx['hex'])
self.nodes[0].decodepsbt(new_psbt)
# Create outputs to nodes 1 and 2
node1_addr = self.nodes[1].getnewaddress()
node2_addr = self.nodes[2].getnewaddress()
txid1 = self.nodes[0].sendtoaddress(node1_addr, 13)
txid2 = self.nodes[0].sendtoaddress(node2_addr, 13)
blockhash = self.generate(self.nodes[0], 6)[0]
self.sync_all()
vout1 = find_output(self.nodes[1], txid1, 13, blockhash=blockhash)
vout2 = find_output(self.nodes[2], txid2, 13, blockhash=blockhash)
# Create a psbt spending outputs from nodes 1 and 2
psbt_orig = self.nodes[0].createpsbt([{"txid":txid1, "vout":vout1}, {"txid":txid2, "vout":vout2}], {self.nodes[0].getnewaddress():25.999})
# Update psbts, should only have data for one input and not the other
psbt1 = self.nodes[1].walletprocesspsbt(psbt_orig, False, "ALL")['psbt']
psbt1_decoded = self.nodes[0].decodepsbt(psbt1)
assert psbt1_decoded['inputs'][0] and not psbt1_decoded['inputs'][1]
# Check that BIP32 path was added
assert "bip32_derivs" in psbt1_decoded['inputs'][0]
psbt2 = self.nodes[2].walletprocesspsbt(psbt_orig, False, "ALL", False)['psbt']
psbt2_decoded = self.nodes[0].decodepsbt(psbt2)
assert not psbt2_decoded['inputs'][0] and psbt2_decoded['inputs'][1]
# Check that BIP32 paths were not added
assert "bip32_derivs" not in psbt2_decoded['inputs'][1]
# Sign PSBTs (workaround issue #18039)
psbt1 = self.nodes[1].walletprocesspsbt(psbt_orig)['psbt']
psbt2 = self.nodes[2].walletprocesspsbt(psbt_orig)['psbt']
# Combine, finalize, and send the psbts
combined = self.nodes[0].combinepsbt([psbt1, psbt2])
finalized = self.nodes[0].finalizepsbt(combined)['hex']
self.nodes[0].sendrawtransaction(finalized)
self.generate(self.nodes[0], 6)
self.sync_all()
# Test additional args in walletcreatepsbt
# Make sure both pre-included and funded inputs
# have the correct sequence numbers based on
# replaceable arg
block_height = self.nodes[0].getblockcount()
unspent = self.nodes[0].listunspent()[0]
psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height+2, {"replaceable": False, "add_inputs": True}, False)
decoded_psbt = self.nodes[0].decodepsbt(psbtx_info["psbt"])
for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]):
assert_greater_than(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE)
assert "bip32_derivs" not in psbt_in
assert_equal(decoded_psbt["tx"]["locktime"], block_height+2)
# Same construction with only locktime set and RBF explicitly enabled
psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height, {"replaceable": True, "add_inputs": True}, True)
decoded_psbt = self.nodes[0].decodepsbt(psbtx_info["psbt"])
for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]):
assert_equal(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE)
assert "bip32_derivs" in psbt_in
assert_equal(decoded_psbt["tx"]["locktime"], block_height)
# Same construction without optional arguments
psbtx_info = self.nodes[0].walletcreatefundedpsbt([], [{self.nodes[2].getnewaddress():unspent["amount"]+1}])
decoded_psbt = self.nodes[0].decodepsbt(psbtx_info["psbt"])
for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]):
assert_equal(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE)
assert "bip32_derivs" in psbt_in
assert_equal(decoded_psbt["tx"]["locktime"], 0)
# Same construction without optional arguments, for a node with -walletrbf=0
unspent1 = self.nodes[1].listunspent()[0]
psbtx_info = self.nodes[1].walletcreatefundedpsbt([{"txid":unspent1["txid"], "vout":unspent1["vout"]}], [{self.nodes[2].getnewaddress():unspent1["amount"]+1}], block_height, {"add_inputs": True})
decoded_psbt = self.nodes[1].decodepsbt(psbtx_info["psbt"])
for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]):
assert_greater_than(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE)
assert "bip32_derivs" in psbt_in
# Make sure that using a change address the wallet does not have P2SH innerscript access to
# still results in success when attempting BnB coin selection
self.nodes[0].walletcreatefundedpsbt([], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height+2, {"changeAddress":self.nodes[1].getnewaddress()}, False)
# Make sure the wallet's change type is respected by default
small_output = {self.nodes[0].getnewaddress():0.1}
psbtx_native = self.nodes[0].walletcreatefundedpsbt([], [small_output])
self.assert_change_type(psbtx_native, "witness_v0_keyhash")
psbtx_legacy = self.nodes[1].walletcreatefundedpsbt([], [small_output])
self.assert_change_type(psbtx_legacy, "pubkeyhash")
# Make sure the change type of the wallet can also be overwritten
psbtx_np2wkh = self.nodes[1].walletcreatefundedpsbt([], [small_output], 0, {"change_type":"p2sh-segwit"})
self.assert_change_type(psbtx_np2wkh, "scripthash")
# Make sure the change type cannot be specified if a change address is given
invalid_options = {"change_type":"legacy","changeAddress":self.nodes[0].getnewaddress()}
assert_raises_rpc_error(-8, "both change address and address type options", self.nodes[0].walletcreatefundedpsbt, [], [small_output], 0, invalid_options)
# Regression test for 14473 (mishandling of already-signed witness transaction):
psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], 0, {"add_inputs": True})
complete_psbt = self.nodes[0].walletprocesspsbt(psbtx_info["psbt"])
double_processed_psbt = self.nodes[0].walletprocesspsbt(complete_psbt["psbt"])
assert_equal(complete_psbt, double_processed_psbt)
# We don't care about the decode result, but decoding must succeed.
self.nodes[0].decodepsbt(double_processed_psbt["psbt"])
# Make sure unsafe inputs are included if specified
self.nodes[2].createwallet(wallet_name="unsafe")
wunsafe = self.nodes[2].get_wallet_rpc("unsafe")
self.nodes[0].sendtoaddress(wunsafe.getnewaddress(), 2)
self.sync_mempools()
assert_raises_rpc_error(-4, "Insufficient funds", wunsafe.walletcreatefundedpsbt, [], [{self.nodes[0].getnewaddress(): 1}])
wunsafe.walletcreatefundedpsbt([], [{self.nodes[0].getnewaddress(): 1}], 0, {"include_unsafe": True})
# BIP 174 Test Vectors
# Check that unknown values are just passed through
unknown_psbt = "cHNidP8BAD8CAAAAAf//////////////////////////////////////////AAAAAAD/////AQAAAAAAAAAAA2oBAAAAAAAACg8BAgMEBQYHCAkPAQIDBAUGBwgJCgsMDQ4PAAA="
unknown_out = self.nodes[0].walletprocesspsbt(unknown_psbt)['psbt']
assert_equal(unknown_psbt, unknown_out)
# Open the data file
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data/rpc_psbt.json'), encoding='utf-8') as f:
d = json.load(f)
invalids = d['invalid']
valids = d['valid']
creators = d['creator']
signers = d['signer']
combiners = d['combiner']
finalizers = d['finalizer']
extractors = d['extractor']
# Invalid PSBTs
for invalid in invalids:
assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].decodepsbt, invalid)
# Valid PSBTs
for valid in valids:
self.nodes[0].decodepsbt(valid)
# Creator Tests
for creator in creators:
created_tx = self.nodes[0].createpsbt(creator['inputs'], creator['outputs'])
assert_equal(created_tx, creator['result'])
# Signer tests
for i, signer in enumerate(signers):
self.nodes[2].createwallet(wallet_name="wallet{}".format(i))
wrpc = self.nodes[2].get_wallet_rpc("wallet{}".format(i))
for key in signer['privkeys']:
wrpc.importprivkey(key)
signed_tx = wrpc.walletprocesspsbt(signer['psbt'])['psbt']
assert_equal(signed_tx, signer['result'])
# Combiner test
for combiner in combiners:
combined = self.nodes[2].combinepsbt(combiner['combine'])
assert_equal(combined, combiner['result'])
# Empty combiner test
assert_raises_rpc_error(-8, "Parameter 'txs' cannot be empty", self.nodes[0].combinepsbt, [])
# Finalizer test
for finalizer in finalizers:
finalized = self.nodes[2].finalizepsbt(finalizer['finalize'], False)['psbt']
assert_equal(finalized, finalizer['result'])
# Extractor test
for extractor in extractors:
extracted = self.nodes[2].finalizepsbt(extractor['extract'], True)['hex']
assert_equal(extracted, extractor['result'])
# Unload extra wallets
for i, signer in enumerate(signers):
self.nodes[2].unloadwallet("wallet{}".format(i))
# TODO: Re-enable this for segwit v1
# self.test_utxo_conversion()
# Test that psbts with p2pkh outputs are created properly
p2pkh = self.nodes[0].getnewaddress(address_type='legacy')
psbt = self.nodes[1].walletcreatefundedpsbt([], [{p2pkh : 1}], 0, {"includeWatching" : True}, True)
self.nodes[0].decodepsbt(psbt['psbt'])
# Test decoding error: invalid base64
assert_raises_rpc_error(-22, "TX decode failed invalid base64", self.nodes[0].decodepsbt, ";definitely not base64;")
# Send to all types of addresses
addr1 = self.nodes[1].getnewaddress("", "bech32")
txid1 = self.nodes[0].sendtoaddress(addr1, 11)
vout1 = find_output(self.nodes[0], txid1, 11)
addr2 = self.nodes[1].getnewaddress("", "legacy")
txid2 = self.nodes[0].sendtoaddress(addr2, 11)
vout2 = find_output(self.nodes[0], txid2, 11)
addr3 = self.nodes[1].getnewaddress("", "p2sh-segwit")
txid3 = self.nodes[0].sendtoaddress(addr3, 11)
vout3 = find_output(self.nodes[0], txid3, 11)
self.sync_all()
def test_psbt_input_keys(psbt_input, keys):
"""Check that the psbt input has only the expected keys."""
assert_equal(set(keys), set(psbt_input.keys()))
# Create a PSBT. None of the inputs are filled initially
psbt = self.nodes[1].createpsbt([{"txid":txid1, "vout":vout1},{"txid":txid2, "vout":vout2},{"txid":txid3, "vout":vout3}], {self.nodes[0].getnewaddress():32.999})
decoded = self.nodes[1].decodepsbt(psbt)
test_psbt_input_keys(decoded['inputs'][0], [])
test_psbt_input_keys(decoded['inputs'][1], [])
test_psbt_input_keys(decoded['inputs'][2], [])
# Update a PSBT with UTXOs from the node
# Bech32 inputs should be filled with witness UTXO. Other inputs should not be filled because they are non-witness
updated = self.nodes[1].utxoupdatepsbt(psbt)
decoded = self.nodes[1].decodepsbt(updated)
test_psbt_input_keys(decoded['inputs'][0], ['witness_utxo'])
test_psbt_input_keys(decoded['inputs'][1], [])
test_psbt_input_keys(decoded['inputs'][2], [])
# Try again, now while providing descriptors, making P2SH-segwit work, and causing bip32_derivs and redeem_script to be filled in
descs = [self.nodes[1].getaddressinfo(addr)['desc'] for addr in [addr1,addr2,addr3]]
updated = self.nodes[1].utxoupdatepsbt(psbt=psbt, descriptors=descs)
decoded = self.nodes[1].decodepsbt(updated)
test_psbt_input_keys(decoded['inputs'][0], ['witness_utxo', 'bip32_derivs'])
test_psbt_input_keys(decoded['inputs'][1], [])
test_psbt_input_keys(decoded['inputs'][2], ['witness_utxo', 'bip32_derivs', 'redeem_script'])
# Two PSBTs with a common input should not be joinable
psbt1 = self.nodes[1].createpsbt([{"txid":txid1, "vout":vout1}], {self.nodes[0].getnewaddress():Decimal('10.999')})
assert_raises_rpc_error(-8, "exists in multiple PSBTs", self.nodes[1].joinpsbts, [psbt1, updated])
# Join two distinct PSBTs
addr4 = self.nodes[1].getnewaddress("", "p2sh-segwit")
txid4 = self.nodes[0].sendtoaddress(addr4, 5)
vout4 = find_output(self.nodes[0], txid4, 5)
self.generate(self.nodes[0], 6)
self.sync_all()
psbt2 = self.nodes[1].createpsbt([{"txid":txid4, "vout":vout4}], {self.nodes[0].getnewaddress():Decimal('4.999')})
psbt2 = self.nodes[1].walletprocesspsbt(psbt2)['psbt']
psbt2_decoded = self.nodes[0].decodepsbt(psbt2)
assert "final_scriptwitness" in psbt2_decoded['inputs'][0] and "final_scriptSig" in psbt2_decoded['inputs'][0]
joined = self.nodes[0].joinpsbts([psbt, psbt2])
joined_decoded = self.nodes[0].decodepsbt(joined)
assert len(joined_decoded['inputs']) == 4 and len(joined_decoded['outputs']) == 2 and "final_scriptwitness" not in joined_decoded['inputs'][3] and "final_scriptSig" not in joined_decoded['inputs'][3]
# Check that joining shuffles the inputs and outputs
# 10 attempts should be enough to get a shuffled join
shuffled = False
for _ in range(10):
shuffled_joined = self.nodes[0].joinpsbts([psbt, psbt2])
shuffled |= joined != shuffled_joined
if shuffled:
break
assert shuffled
# Newly created PSBT needs UTXOs and updating
addr = self.nodes[1].getnewaddress("", "p2sh-segwit")
txid = self.nodes[0].sendtoaddress(addr, 7)
addrinfo = self.nodes[1].getaddressinfo(addr)
blockhash = self.generate(self.nodes[0], 6)[0]
self.sync_all()
vout = find_output(self.nodes[0], txid, 7, blockhash=blockhash)
psbt = self.nodes[1].createpsbt([{"txid":txid, "vout":vout}], {self.nodes[0].getnewaddress("", "p2sh-segwit"):Decimal('6.999')})
analyzed = self.nodes[0].analyzepsbt(psbt)
assert not analyzed['inputs'][0]['has_utxo'] and not analyzed['inputs'][0]['is_final'] and analyzed['inputs'][0]['next'] == 'updater' and analyzed['next'] == 'updater'
# After update with wallet, only needs signing
updated = self.nodes[1].walletprocesspsbt(psbt, False, 'ALL', True)['psbt']
analyzed = self.nodes[0].analyzepsbt(updated)
assert analyzed['inputs'][0]['has_utxo'] and not analyzed['inputs'][0]['is_final'] and analyzed['inputs'][0]['next'] == 'signer' and analyzed['next'] == 'signer' and analyzed['inputs'][0]['missing']['signatures'][0] == addrinfo['embedded']['witness_program']
# Check fee and size things
assert analyzed['fee'] == Decimal('0.001') and analyzed['estimated_vsize'] == 134 and analyzed['estimated_feerate'] == Decimal('0.00746268')
# After signing and finalizing, needs extracting
signed = self.nodes[1].walletprocesspsbt(updated)['psbt']
analyzed = self.nodes[0].analyzepsbt(signed)
assert analyzed['inputs'][0]['has_utxo'] and analyzed['inputs'][0]['is_final'] and analyzed['next'] == 'extractor'
self.log.info("PSBT spending unspendable outputs should have error message and Creator as next")
analysis = self.nodes[0].analyzepsbt('cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWAEHYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFv8/wADXYP/7//////8JxOh0LR2HAI8AAAAAAAEBIADC6wsAAAAAF2oUt/X69ELjeX2nTof+fZ10l+OyAokDAQcJAwEHEAABAACAAAEBIADC6wsAAAAAF2oUt/X69ELjeX2nTof+fZ10l+OyAokDAQcJAwEHENkMak8AAAAA')
assert_equal(analysis['next'], 'creator')
assert_equal(analysis['error'], 'PSBT is not valid. Input 0 spends unspendable output')
self.log.info("PSBT with invalid values should have error message and Creator as next")
analysis = self.nodes[0].analyzepsbt('cHNidP8BAHECAAAAAfA00BFgAm6tp86RowwH6BMImQNL5zXUcTT97XoLGz0BAAAAAAD/////AgD5ApUAAAAAFgAUKNw0x8HRctAgmvoevm4u1SbN7XL87QKVAAAAABYAFPck4gF7iL4NL4wtfRAKgQbghiTUAAAAAAABAR8AgIFq49AHABYAFJUDtxf2PHo641HEOBOAIvFMNTr2AAAA')
assert_equal(analysis['next'], 'creator')
assert_equal(analysis['error'], 'PSBT is not valid. Input 0 has invalid value')
self.log.info("PSBT with signed, but not finalized, inputs should have Finalizer as next")
analysis = self.nodes[0].analyzepsbt('cHNidP8BAHECAAAAAZYezcxdnbXoQCmrD79t/LzDgtUo9ERqixk8wgioAobrAAAAAAD9////AlDDAAAAAAAAFgAUy/UxxZuzZswcmFnN/E9DGSiHLUsuGPUFAAAAABYAFLsH5o0R38wXx+X2cCosTMCZnQ4baAAAAAABAR8A4fUFAAAAABYAFOBI2h5thf3+Lflb2LGCsVSZwsltIgIC/i4dtVARCRWtROG0HHoGcaVklzJUcwo5homgGkSNAnJHMEQCIGx7zKcMIGr7cEES9BR4Kdt/pzPTK3fKWcGyCJXb7MVnAiALOBgqlMH4GbC1HDh/HmylmO54fyEy4lKde7/BT/PWxwEBAwQBAAAAIgYC/i4dtVARCRWtROG0HHoGcaVklzJUcwo5homgGkSNAnIYDwVpQ1QAAIABAACAAAAAgAAAAAAAAAAAAAAiAgL+CIiB59NSCssOJRGiMYQK1chahgAaaJpIXE41Cyir+xgPBWlDVAAAgAEAAIAAAACAAQAAAAAAAAAA')
assert_equal(analysis['next'], 'finalizer')
analysis = self.nodes[0].analyzepsbt('cHNidP8BAHECAAAAAfA00BFgAm6tp86RowwH6BMImQNL5zXUcTT97XoLGz0BAAAAAAD/////AgCAgWrj0AcAFgAUKNw0x8HRctAgmvoevm4u1SbN7XL87QKVAAAAABYAFPck4gF7iL4NL4wtfRAKgQbghiTUAAAAAAABAR8A8gUqAQAAABYAFJUDtxf2PHo641HEOBOAIvFMNTr2AAAA')
assert_equal(analysis['next'], 'creator')
assert_equal(analysis['error'], 'PSBT is not valid. Output amount invalid')
analysis = self.nodes[0].analyzepsbt('cHNidP8BAJoCAAAAAkvEW8NnDtdNtDpsmze+Ht2LH35IJcKv00jKAlUs21RrAwAAAAD/////S8Rbw2cO1020OmybN74e3Ysffkglwq/TSMoCVSzbVGsBAAAAAP7///8CwLYClQAAAAAWABSNJKzjaUb3uOxixsvh1GGE3fW7zQD5ApUAAAAAFgAUKNw0x8HRctAgmvoevm4u1SbN7XIAAAAAAAEAnQIAAAACczMa321tVHuN4GKWKRncycI22aX3uXgwSFUKM2orjRsBAAAAAP7///9zMxrfbW1Ue43gYpYpGdzJwjbZpfe5eDBIVQozaiuNGwAAAAAA/v///wIA+QKVAAAAABl2qRT9zXUVA8Ls5iVqynLHe5/vSe1XyYisQM0ClQAAAAAWABRmWQUcjSjghQ8/uH4Bn/zkakwLtAAAAAAAAQEfQM0ClQAAAAAWABRmWQUcjSjghQ8/uH4Bn/zkakwLtAAAAA==')
assert_equal(analysis['next'], 'creator')
assert_equal(analysis['error'], 'PSBT is not valid. Input 0 specifies invalid prevout')
assert_raises_rpc_error(-25, 'Inputs missing or spent', self.nodes[0].walletprocesspsbt, 'cHNidP8BAJoCAAAAAkvEW8NnDtdNtDpsmze+Ht2LH35IJcKv00jKAlUs21RrAwAAAAD/////S8Rbw2cO1020OmybN74e3Ysffkglwq/TSMoCVSzbVGsBAAAAAP7///8CwLYClQAAAAAWABSNJKzjaUb3uOxixsvh1GGE3fW7zQD5ApUAAAAAFgAUKNw0x8HRctAgmvoevm4u1SbN7XIAAAAAAAEAnQIAAAACczMa321tVHuN4GKWKRncycI22aX3uXgwSFUKM2orjRsBAAAAAP7///9zMxrfbW1Ue43gYpYpGdzJwjbZpfe5eDBIVQozaiuNGwAAAAAA/v///wIA+QKVAAAAABl2qRT9zXUVA8Ls5iVqynLHe5/vSe1XyYisQM0ClQAAAAAWABRmWQUcjSjghQ8/uH4Bn/zkakwLtAAAAAAAAQEfQM0ClQAAAAAWABRmWQUcjSjghQ8/uH4Bn/zkakwLtAAAAA==')
# Test that we can fund psbts with external inputs specified
eckey = ECKey()
eckey.generate()
privkey = bytes_to_wif(eckey.get_bytes())
# Make a weird but signable script. sh(pkh()) descriptor accomplishes this
desc = descsum_create("sh(pkh({}))".format(privkey))
if self.options.descriptors:
res = self.nodes[0].importdescriptors([{"desc": desc, "timestamp": "now"}])
else:
res = self.nodes[0].importmulti([{"desc": desc, "timestamp": "now"}])
assert res[0]["success"]
addr = self.nodes[0].deriveaddresses(desc)[0]
addr_info = self.nodes[0].getaddressinfo(addr)
self.nodes[0].sendtoaddress(addr, 10)
self.generate(self.nodes[0], 6)
self.sync_all()
ext_utxo = self.nodes[0].listunspent(addresses=[addr])[0]
# An external input without solving data should result in an error
assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[1].walletcreatefundedpsbt, [ext_utxo], {self.nodes[0].getnewaddress(): 10 + ext_utxo['amount']}, 0, {'add_inputs': True})
# But funding should work when the solving data is provided
psbt = self.nodes[1].walletcreatefundedpsbt([ext_utxo], {self.nodes[0].getnewaddress(): 15}, 0, {'add_inputs': True, "solving_data": {"pubkeys": [addr_info['pubkey']], "scripts": [addr_info["embedded"]["scriptPubKey"]]}})
signed = self.nodes[1].walletprocesspsbt(psbt['psbt'])
assert not signed['complete']
signed = self.nodes[0].walletprocesspsbt(signed['psbt'])
assert signed['complete']
self.nodes[0].finalizepsbt(signed['psbt'])
psbt = self.nodes[1].walletcreatefundedpsbt([ext_utxo], {self.nodes[0].getnewaddress(): 15}, 0, {'add_inputs': True, "solving_data":{"descriptors": [desc]}})
signed = self.nodes[1].walletprocesspsbt(psbt['psbt'])
assert not signed['complete']
signed = self.nodes[0].walletprocesspsbt(signed['psbt'])
assert signed['complete']
self.nodes[0].finalizepsbt(signed['psbt'])
if __name__ == '__main__':
PSBTTest().main()
|
test_utxo_conversion
|
VirtualContestTable.tsx
|
import { BootstrapTable, TableHeaderColumn } from "react-bootstrap-table";
import { Link } from "react-router-dom";
import * as DateUtil from "../../utils/DateUtil";
import React from "react";
import { VirtualContest } from "./types";
export default (props: { contests: VirtualContest[] }) => {
return (
<BootstrapTable
data={[...props.contests].sort(
(a, b) => b.start_epoch_second - a.start_epoch_second
)}
pagination
keyField="id"
height="auto"
hover
striped
search
>
|
dataField="title"
dataFormat={(title: string, contest: VirtualContest) => (
<Link to={`/contest/show/${contest.id}`}>{title}</Link>
)}
>
Title
</TableHeaderColumn>
<TableHeaderColumn dataField="memo">Description</TableHeaderColumn>
<TableHeaderColumn
dataField="start_epoch_second"
dataFormat={(_: number, contest: VirtualContest) => {
const time = DateUtil.parseSecond(contest.start_epoch_second);
return DateUtil.formatMomentDateTime(time);
}}
>
Start
</TableHeaderColumn>
<TableHeaderColumn
dataField="duration_second"
dataFormat={(_: number, contest: VirtualContest) => {
const time = DateUtil.parseSecond(
contest.start_epoch_second + contest.duration_second
);
return DateUtil.formatMomentDateTime(time);
}}
>
End
</TableHeaderColumn>
</BootstrapTable>
);
};
|
<TableHeaderColumn
|
extension_feed_item_error.pb.go
|
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: google/ads/googleads/v1/errors/extension_feed_item_error.proto
package errors
import (
fmt "fmt"
math "math"
proto "github.com/golang/protobuf/proto"
_ "google.golang.org/genproto/googleapis/api/annotations"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
// Enum describing possible extension feed item errors.
type ExtensionFeedItemErrorEnum_ExtensionFeedItemError int32
const (
// Enum unspecified.
ExtensionFeedItemErrorEnum_UNSPECIFIED ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 0
// The received error code is not known in this version.
ExtensionFeedItemErrorEnum_UNKNOWN ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 1
// Value is not within the accepted range.
ExtensionFeedItemErrorEnum_VALUE_OUT_OF_RANGE ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 2
// Url list is too long.
ExtensionFeedItemErrorEnum_URL_LIST_TOO_LONG ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 3
// Cannot have a geo targeting restriction without having geo targeting.
ExtensionFeedItemErrorEnum_CANNOT_HAVE_RESTRICTION_ON_EMPTY_GEO_TARGETING ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 4
// Cannot simultaneously set sitelink field with final urls.
ExtensionFeedItemErrorEnum_CANNOT_SET_WITH_FINAL_URLS ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 5
// Must set field with final urls.
ExtensionFeedItemErrorEnum_CANNOT_SET_WITHOUT_FINAL_URLS ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 6
// Phone number for a call extension is invalid.
ExtensionFeedItemErrorEnum_INVALID_PHONE_NUMBER ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 7
// Phone number for a call extension is not supported for the given country
// code.
ExtensionFeedItemErrorEnum_PHONE_NUMBER_NOT_SUPPORTED_FOR_COUNTRY ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 8
// A carrier specific number in short format is not allowed for call
// extensions.
ExtensionFeedItemErrorEnum_CARRIER_SPECIFIC_SHORT_NUMBER_NOT_ALLOWED ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 9
// Premium rate numbers are not allowed for call extensions.
ExtensionFeedItemErrorEnum_PREMIUM_RATE_NUMBER_NOT_ALLOWED ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 10
// Phone number type for a call extension is not allowed.
// For example, personal number is not allowed for a call extension in
// most regions.
ExtensionFeedItemErrorEnum_DISALLOWED_NUMBER_TYPE ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 11
// Phone number for a call extension does not meet domestic format
// requirements.
ExtensionFeedItemErrorEnum_INVALID_DOMESTIC_PHONE_NUMBER_FORMAT ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 12
// Vanity phone numbers (i.e. those including letters) are not allowed for
// call extensions.
ExtensionFeedItemErrorEnum_VANITY_PHONE_NUMBER_NOT_ALLOWED ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 13
// Call conversion action provided for a call extension is invalid.
ExtensionFeedItemErrorEnum_INVALID_CALL_CONVERSION_ACTION ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 14
// For a call extension, the customer is not whitelisted for call tracking.
ExtensionFeedItemErrorEnum_CUSTOMER_NOT_WHITELISTED_FOR_CALLTRACKING ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 15
// Call tracking is not supported for the given country for a call
// extension.
ExtensionFeedItemErrorEnum_CALLTRACKING_NOT_SUPPORTED_FOR_COUNTRY ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 16
// Customer hasn't consented for call recording, which is required for
// creating/updating call feed items. Please see
// https://support.google.com/google-ads/answer/7412639.
ExtensionFeedItemErrorEnum_CUSTOMER_CONSENT_FOR_CALL_RECORDING_REQUIRED ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 17
// App id provided for an app extension is invalid.
ExtensionFeedItemErrorEnum_INVALID_APP_ID ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 18
// Quotation marks present in the review text for a review extension.
ExtensionFeedItemErrorEnum_QUOTES_IN_REVIEW_EXTENSION_SNIPPET ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 19
// Hyphen character present in the review text for a review extension.
ExtensionFeedItemErrorEnum_HYPHENS_IN_REVIEW_EXTENSION_SNIPPET ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 20
// A blacklisted review source name or url was provided for a review
// extension.
ExtensionFeedItemErrorEnum_REVIEW_EXTENSION_SOURCE_INELIGIBLE ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 21
// Review source name should not be found in the review text.
ExtensionFeedItemErrorEnum_SOURCE_NAME_IN_REVIEW_EXTENSION_TEXT ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 22
|
// Price extension cannot have duplicated headers.
ExtensionFeedItemErrorEnum_PRICE_EXTENSION_HAS_DUPLICATED_HEADERS ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 24
// Price item cannot have duplicated header and description.
ExtensionFeedItemErrorEnum_PRICE_ITEM_HAS_DUPLICATED_HEADER_AND_DESCRIPTION ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 25
// Price extension has too few items.
ExtensionFeedItemErrorEnum_PRICE_EXTENSION_HAS_TOO_FEW_ITEMS ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 26
// Price extension has too many items.
ExtensionFeedItemErrorEnum_PRICE_EXTENSION_HAS_TOO_MANY_ITEMS ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 27
// The input value is not currently supported.
ExtensionFeedItemErrorEnum_UNSUPPORTED_VALUE ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 28
// The input value is not currently supported in the selected language of an
// extension.
ExtensionFeedItemErrorEnum_UNSUPPORTED_VALUE_IN_SELECTED_LANGUAGE ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 29
// Unknown or unsupported device preference.
ExtensionFeedItemErrorEnum_INVALID_DEVICE_PREFERENCE ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 30
// Invalid feed item schedule end time (i.e., endHour = 24 and endMinute !=
// 0).
ExtensionFeedItemErrorEnum_INVALID_SCHEDULE_END ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 31
// Date time zone does not match the account's time zone.
ExtensionFeedItemErrorEnum_DATE_TIME_MUST_BE_IN_ACCOUNT_TIME_ZONE ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 32
// Invalid structured snippet header.
ExtensionFeedItemErrorEnum_INVALID_SNIPPETS_HEADER ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 33
// Cannot operate on removed feed item.
ExtensionFeedItemErrorEnum_CANNOT_OPERATE_ON_REMOVED_FEED_ITEM ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 34
// Phone number not supported when call tracking enabled for country.
ExtensionFeedItemErrorEnum_PHONE_NUMBER_NOT_SUPPORTED_WITH_CALLTRACKING_FOR_COUNTRY ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 35
// Cannot set call_conversion_action while call_conversion_tracking_enabled
// is set to true.
ExtensionFeedItemErrorEnum_CONFLICTING_CALL_CONVERSION_SETTINGS ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 36
// The type of the input extension feed item doesn't match the existing
// extension feed item.
ExtensionFeedItemErrorEnum_EXTENSION_TYPE_MISMATCH ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 37
// The oneof field extension i.e. subtype of extension feed item is
// required.
ExtensionFeedItemErrorEnum_EXTENSION_SUBTYPE_REQUIRED ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 38
// The referenced feed item is not mapped to a supported extension type.
ExtensionFeedItemErrorEnum_EXTENSION_TYPE_UNSUPPORTED ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 39
// Cannot operate on a Feed with more than one active FeedMapping.
ExtensionFeedItemErrorEnum_CANNOT_OPERATE_ON_FEED_WITH_MULTIPLE_MAPPINGS ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 40
// Cannot operate on a Feed that has key attributes.
ExtensionFeedItemErrorEnum_CANNOT_OPERATE_ON_FEED_WITH_KEY_ATTRIBUTES ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 41
// Input price is not in a valid format.
ExtensionFeedItemErrorEnum_INVALID_PRICE_FORMAT ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 42
// The promotion time is invalid.
ExtensionFeedItemErrorEnum_PROMOTION_INVALID_TIME ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 43
// This field has too many decimal places specified.
ExtensionFeedItemErrorEnum_TOO_MANY_DECIMAL_PLACES_SPECIFIED ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 44
)
var ExtensionFeedItemErrorEnum_ExtensionFeedItemError_name = map[int32]string{
0: "UNSPECIFIED",
1: "UNKNOWN",
2: "VALUE_OUT_OF_RANGE",
3: "URL_LIST_TOO_LONG",
4: "CANNOT_HAVE_RESTRICTION_ON_EMPTY_GEO_TARGETING",
5: "CANNOT_SET_WITH_FINAL_URLS",
6: "CANNOT_SET_WITHOUT_FINAL_URLS",
7: "INVALID_PHONE_NUMBER",
8: "PHONE_NUMBER_NOT_SUPPORTED_FOR_COUNTRY",
9: "CARRIER_SPECIFIC_SHORT_NUMBER_NOT_ALLOWED",
10: "PREMIUM_RATE_NUMBER_NOT_ALLOWED",
11: "DISALLOWED_NUMBER_TYPE",
12: "INVALID_DOMESTIC_PHONE_NUMBER_FORMAT",
13: "VANITY_PHONE_NUMBER_NOT_ALLOWED",
14: "INVALID_CALL_CONVERSION_ACTION",
15: "CUSTOMER_NOT_WHITELISTED_FOR_CALLTRACKING",
16: "CALLTRACKING_NOT_SUPPORTED_FOR_COUNTRY",
17: "CUSTOMER_CONSENT_FOR_CALL_RECORDING_REQUIRED",
18: "INVALID_APP_ID",
19: "QUOTES_IN_REVIEW_EXTENSION_SNIPPET",
20: "HYPHENS_IN_REVIEW_EXTENSION_SNIPPET",
21: "REVIEW_EXTENSION_SOURCE_INELIGIBLE",
22: "SOURCE_NAME_IN_REVIEW_EXTENSION_TEXT",
23: "INCONSISTENT_CURRENCY_CODES",
24: "PRICE_EXTENSION_HAS_DUPLICATED_HEADERS",
25: "PRICE_ITEM_HAS_DUPLICATED_HEADER_AND_DESCRIPTION",
26: "PRICE_EXTENSION_HAS_TOO_FEW_ITEMS",
27: "PRICE_EXTENSION_HAS_TOO_MANY_ITEMS",
28: "UNSUPPORTED_VALUE",
29: "UNSUPPORTED_VALUE_IN_SELECTED_LANGUAGE",
30: "INVALID_DEVICE_PREFERENCE",
31: "INVALID_SCHEDULE_END",
32: "DATE_TIME_MUST_BE_IN_ACCOUNT_TIME_ZONE",
33: "INVALID_SNIPPETS_HEADER",
34: "CANNOT_OPERATE_ON_REMOVED_FEED_ITEM",
35: "PHONE_NUMBER_NOT_SUPPORTED_WITH_CALLTRACKING_FOR_COUNTRY",
36: "CONFLICTING_CALL_CONVERSION_SETTINGS",
37: "EXTENSION_TYPE_MISMATCH",
38: "EXTENSION_SUBTYPE_REQUIRED",
39: "EXTENSION_TYPE_UNSUPPORTED",
40: "CANNOT_OPERATE_ON_FEED_WITH_MULTIPLE_MAPPINGS",
41: "CANNOT_OPERATE_ON_FEED_WITH_KEY_ATTRIBUTES",
42: "INVALID_PRICE_FORMAT",
43: "PROMOTION_INVALID_TIME",
44: "TOO_MANY_DECIMAL_PLACES_SPECIFIED",
}
var ExtensionFeedItemErrorEnum_ExtensionFeedItemError_value = map[string]int32{
"UNSPECIFIED": 0,
"UNKNOWN": 1,
"VALUE_OUT_OF_RANGE": 2,
"URL_LIST_TOO_LONG": 3,
"CANNOT_HAVE_RESTRICTION_ON_EMPTY_GEO_TARGETING": 4,
"CANNOT_SET_WITH_FINAL_URLS": 5,
"CANNOT_SET_WITHOUT_FINAL_URLS": 6,
"INVALID_PHONE_NUMBER": 7,
"PHONE_NUMBER_NOT_SUPPORTED_FOR_COUNTRY": 8,
"CARRIER_SPECIFIC_SHORT_NUMBER_NOT_ALLOWED": 9,
"PREMIUM_RATE_NUMBER_NOT_ALLOWED": 10,
"DISALLOWED_NUMBER_TYPE": 11,
"INVALID_DOMESTIC_PHONE_NUMBER_FORMAT": 12,
"VANITY_PHONE_NUMBER_NOT_ALLOWED": 13,
"INVALID_CALL_CONVERSION_ACTION": 14,
"CUSTOMER_NOT_WHITELISTED_FOR_CALLTRACKING": 15,
"CALLTRACKING_NOT_SUPPORTED_FOR_COUNTRY": 16,
"CUSTOMER_CONSENT_FOR_CALL_RECORDING_REQUIRED": 17,
"INVALID_APP_ID": 18,
"QUOTES_IN_REVIEW_EXTENSION_SNIPPET": 19,
"HYPHENS_IN_REVIEW_EXTENSION_SNIPPET": 20,
"REVIEW_EXTENSION_SOURCE_INELIGIBLE": 21,
"SOURCE_NAME_IN_REVIEW_EXTENSION_TEXT": 22,
"INCONSISTENT_CURRENCY_CODES": 23,
"PRICE_EXTENSION_HAS_DUPLICATED_HEADERS": 24,
"PRICE_ITEM_HAS_DUPLICATED_HEADER_AND_DESCRIPTION": 25,
"PRICE_EXTENSION_HAS_TOO_FEW_ITEMS": 26,
"PRICE_EXTENSION_HAS_TOO_MANY_ITEMS": 27,
"UNSUPPORTED_VALUE": 28,
"UNSUPPORTED_VALUE_IN_SELECTED_LANGUAGE": 29,
"INVALID_DEVICE_PREFERENCE": 30,
"INVALID_SCHEDULE_END": 31,
"DATE_TIME_MUST_BE_IN_ACCOUNT_TIME_ZONE": 32,
"INVALID_SNIPPETS_HEADER": 33,
"CANNOT_OPERATE_ON_REMOVED_FEED_ITEM": 34,
"PHONE_NUMBER_NOT_SUPPORTED_WITH_CALLTRACKING_FOR_COUNTRY": 35,
"CONFLICTING_CALL_CONVERSION_SETTINGS": 36,
"EXTENSION_TYPE_MISMATCH": 37,
"EXTENSION_SUBTYPE_REQUIRED": 38,
"EXTENSION_TYPE_UNSUPPORTED": 39,
"CANNOT_OPERATE_ON_FEED_WITH_MULTIPLE_MAPPINGS": 40,
"CANNOT_OPERATE_ON_FEED_WITH_KEY_ATTRIBUTES": 41,
"INVALID_PRICE_FORMAT": 42,
"PROMOTION_INVALID_TIME": 43,
"TOO_MANY_DECIMAL_PLACES_SPECIFIED": 44,
}
func (x ExtensionFeedItemErrorEnum_ExtensionFeedItemError) String() string {
return proto.EnumName(ExtensionFeedItemErrorEnum_ExtensionFeedItemError_name, int32(x))
}
func (ExtensionFeedItemErrorEnum_ExtensionFeedItemError) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_5c3cc3411bdbe205, []int{0, 0}
}
// Container for enum describing possible extension feed item error.
type ExtensionFeedItemErrorEnum struct {
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ExtensionFeedItemErrorEnum) Reset() { *m = ExtensionFeedItemErrorEnum{} }
func (m *ExtensionFeedItemErrorEnum) String() string { return proto.CompactTextString(m) }
func (*ExtensionFeedItemErrorEnum) ProtoMessage() {}
func (*ExtensionFeedItemErrorEnum) Descriptor() ([]byte, []int) {
return fileDescriptor_5c3cc3411bdbe205, []int{0}
}
func (m *ExtensionFeedItemErrorEnum) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ExtensionFeedItemErrorEnum.Unmarshal(m, b)
}
func (m *ExtensionFeedItemErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ExtensionFeedItemErrorEnum.Marshal(b, m, deterministic)
}
func (m *ExtensionFeedItemErrorEnum) XXX_Merge(src proto.Message) {
xxx_messageInfo_ExtensionFeedItemErrorEnum.Merge(m, src)
}
func (m *ExtensionFeedItemErrorEnum) XXX_Size() int {
return xxx_messageInfo_ExtensionFeedItemErrorEnum.Size(m)
}
func (m *ExtensionFeedItemErrorEnum) XXX_DiscardUnknown() {
xxx_messageInfo_ExtensionFeedItemErrorEnum.DiscardUnknown(m)
}
var xxx_messageInfo_ExtensionFeedItemErrorEnum proto.InternalMessageInfo
func init() {
proto.RegisterEnum("google.ads.googleads.v1.errors.ExtensionFeedItemErrorEnum_ExtensionFeedItemError", ExtensionFeedItemErrorEnum_ExtensionFeedItemError_name, ExtensionFeedItemErrorEnum_ExtensionFeedItemError_value)
proto.RegisterType((*ExtensionFeedItemErrorEnum)(nil), "google.ads.googleads.v1.errors.ExtensionFeedItemErrorEnum")
}
func init() {
proto.RegisterFile("google/ads/googleads/v1/errors/extension_feed_item_error.proto", fileDescriptor_5c3cc3411bdbe205)
}
var fileDescriptor_5c3cc3411bdbe205 = []byte{
// 1056 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x55, 0xdd, 0x8e, 0x53, 0x37,
0x10, 0x2e, 0x4b, 0x0b, 0xad, 0x29, 0x60, 0x5c, 0x58, 0x60, 0x17, 0x96, 0x12, 0xfe, 0x29, 0x24,
0xa4, 0xed, 0x45, 0x95, 0x56, 0x95, 0x1c, 0x9f, 0x49, 0x62, 0xe1, 0x63, 0x1b, 0xdb, 0x27, 0x21,
0x68, 0xa5, 0xd1, 0xb6, 0x49, 0xa3, 0x95, 0xd8, 0x64, 0xb5, 0x49, 0x51, 0x9f, 0xa2, 0x0f, 0xd1,
0xcb, 0x4a, 0x7d, 0x91, 0x3e, 0x4a, 0x1f, 0xa0, 0xd7, 0x95, 0x7d, 0x92, 0x90, 0xed, 0x86, 0xbd,
0xca, 0xc9, 0xf8, 0x9b, 0xff, 0x6f, 0x66, 0xc8, 0x8f, 0xa3, 0xc9, 0x64, 0xf4, 0x76, 0x58, 0xdb,
0x1b, 0x4c, 0x6b, 0xe5, 0x67, 0xfc, 0x7a, 0x57, 0xaf, 0x0d, 0x8f, 0x8e, 0x26, 0x47, 0xd3, 0xda,
0xf0, 0xb7, 0xd9, 0x70, 0x3c, 0xdd, 0x9f, 0x8c, 0xf1, 0x97, 0xe1, 0x70, 0x80, 0xfb, 0xb3, 0xe1,
0x01, 0xa6, 0xa7, 0xea, 0xe1, 0xd1, 0x64, 0x36, 0x61, 0x3b, 0xa5, 0x52, 0x75, 0x6f, 0x30, 0xad,
0x2e, 0xf5, 0xab, 0xef, 0xea, 0xd5, 0x52, 0x7f, 0xeb, 0xd6, 0xc2, 0xfe, 0xe1, 0x7e, 0x6d, 0x6f,
0x3c, 0x9e, 0xcc, 0xf6, 0x66, 0xfb, 0x93, 0xf1, 0xb4, 0xd4, 0xae, 0xfc, 0x75, 0x91, 0x6c, 0xc1,
0xc2, 0x43, 0x6b, 0x38, 0x1c, 0xc8, 0xd9, 0xf0, 0x00, 0xa2, 0x26, 0x8c, 0x7f, 0x3d, 0xa8, 0xfc,
0x7e, 0x91, 0x6c, 0xae, 0x7f, 0x66, 0x97, 0xc9, 0x85, 0x42, 0x7b, 0x0b, 0x42, 0xb6, 0x24, 0x64,
0xf4, 0x23, 0x76, 0x81, 0x9c, 0x2f, 0xf4, 0x4b, 0x6d, 0x7a, 0x9a, 0x9e, 0x61, 0x9b, 0x84, 0x75,
0xb9, 0x2a, 0x00, 0x4d, 0x11, 0xd0, 0xb4, 0xd0, 0x71, 0xdd, 0x06, 0xba, 0xc1, 0xae, 0x91, 0x2b,
0x85, 0x53, 0xa8, 0xa4, 0x0f, 0x18, 0x8c, 0x41, 0x65, 0x74, 0x9b, 0x9e, 0x65, 0x5f, 0x93, 0xaa,
0xe0, 0x5a, 0x9b, 0x80, 0x1d, 0xde, 0x05, 0x74, 0xe0, 0x83, 0x93, 0x22, 0x48, 0xa3, 0xd1, 0x68,
0x84, 0xdc, 0x86, 0x3e, 0xb6, 0xc1, 0x60, 0xe0, 0xae, 0x0d, 0x41, 0xea, 0x36, 0xfd, 0x98, 0xed,
0x90, 0xad, 0xb9, 0x8e, 0x87, 0x80, 0x3d, 0x19, 0x3a, 0xd8, 0x92, 0x9a, 0x2b, 0x2c, 0x9c, 0xf2,
0xf4, 0x13, 0x76, 0x97, 0xdc, 0xfe, 0xdf, 0x7b, 0x8c, 0x65, 0x05, 0x72, 0x8e, 0xdd, 0x20, 0x57,
0xa5, 0xee, 0x72, 0x25, 0x33, 0xb4, 0x1d, 0xa3, 0x01, 0x75, 0x91, 0x37, 0xc1, 0xd1, 0xf3, 0xec,
0x29, 0x79, 0xb8, 0x2a, 0xc1, 0x64, 0xa6, 0xb0, 0xd6, 0xb8, 0x00, 0x19, 0xb6, 0x8c, 0x43, 0x61,
0x0a, 0x1d, 0x5c, 0x9f, 0x7e, 0xca, 0x9e, 0x93, 0x27, 0x82, 0x3b, 0x27, 0xc1, 0xe1, 0xbc, 0x1e,
0x02, 0x7d, 0xc7, 0xb8, 0xb0, 0xaa, 0xcc, 0x95, 0x32, 0x3d, 0xc8, 0xe8, 0x67, 0xec, 0x1e, 0xb9,
0x63, 0x1d, 0xe4, 0xb2, 0xc8, 0xd1, 0xf1, 0x00, 0xeb, 0x40, 0x84, 0x6d, 0x91, 0xcd, 0x4c, 0xfa,
0xf9, 0xff, 0x05, 0x24, 0xf4, 0x2d, 0xd0, 0x0b, 0xec, 0x31, 0xb9, 0xbf, 0x88, 0x3a, 0x33, 0x39,
0xf8, 0x20, 0xc5, 0xb1, 0xf0, 0x63, 0x78, 0x39, 0x0f, 0xf4, 0xf3, 0xe8, 0xaa, 0xcb, 0xb5, 0x0c,
0x7d, 0x3c, 0x91, 0xcc, 0xc2, 0xd5, 0x45, 0x56, 0x21, 0x3b, 0x0b, 0x73, 0x82, 0x2b, 0x85, 0xc2,
0xe8, 0x2e, 0x38, 0x1f, 0x6b, 0xcf, 0x53, 0x0b, 0xe8, 0xa5, 0x94, 0x62, 0xe1, 0x83, 0xc9, 0xe7,
0xda, 0xbd, 0x8e, 0x0c, 0x10, 0x9b, 0xb8, 0x28, 0x06, 0x57, 0x2a, 0x38, 0x2e, 0x5e, 0xc6, 0xd6,
0x5c, 0x8e, 0xd5, 0x5b, 0x95, 0x9c, 0x52, 0x3d, 0xca, 0x5e, 0x90, 0x67, 0x4b, 0xd3, 0xc2, 0x68,
0x0f, 0x3a, 0x2c, 0x4d, 0xa2, 0x03, 0x61, 0x5c, 0x16, 0x4d, 0x38, 0x78, 0x55, 0x48, 0x07, 0x19,
0xbd, 0xc2, 0x18, 0xb9, 0xb4, 0x08, 0x98, 0x5b, 0x8b, 0x32, 0xa3, 0x8c, 0x3d, 0x24, 0x95, 0x57,
0x85, 0x09, 0xe0, 0x51, 0x6a, 0x74, 0xd0, 0x95, 0xd0, 0x43, 0x78, 0x1d, 0x40, 0xa7, 0x3c, 0xbc,
0x96, 0xd6, 0x42, 0xa0, 0x5f, 0xb0, 0x47, 0xe4, 0x5e, 0xa7, 0x6f, 0x3b, 0xa0, 0x4f, 0x07, 0x5e,
0x8d, 0x06, 0x4f, 0xbe, 0x9a, 0xc2, 0x09, 0x40, 0xa9, 0x41, 0xc9, 0xb6, 0x6c, 0x2a, 0xa0, 0xd7,
0x62, 0x33, 0xe6, 0x62, 0xcd, 0x73, 0x58, 0x6b, 0x34, 0xc0, 0xeb, 0x40, 0x37, 0xd9, 0x1d, 0xb2,
0x2d, 0x75, 0xcc, 0x30, 0x96, 0x4d, 0x07, 0x14, 0x85, 0x73, 0xa0, 0x45, 0x1f, 0x85, 0xc9, 0xc0,
0xd3, 0xeb, 0x89, 0x73, 0x4e, 0x0a, 0x58, 0x51, 0xed, 0x70, 0x8f, 0x59, 0x61, 0x95, 0x14, 0x3c,
0x56, 0xae, 0x03, 0x3c, 0x03, 0xe7, 0xe9, 0x0d, 0xf6, 0x2d, 0x79, 0x51, 0x62, 0x65, 0x80, 0x7c,
0x3d, 0x0c, 0xb9, 0xce, 0x30, 0x03, 0x2f, 0x9c, 0xb4, 0xa9, 0x8d, 0x37, 0xd9, 0x03, 0x72, 0x77,
0x9d, 0x87, 0x38, 0x88, 0x2d, 0xe8, 0x25, 0x5b, 0x9e, 0x6e, 0xc5, 0xdc, 0x3f, 0x04, 0xcb, 0xb9,
0xee, 0xcf, 0x71, 0xdb, 0x69, 0x98, 0xf5, 0xfb, 0xbe, 0xa6, 0x81, 0xa7, 0xb7, 0x62, 0x1e, 0x27,
0xc4, 0xb1, 0x30, 0x1e, 0x14, 0x88, 0x28, 0x51, 0x5c, 0xb7, 0x0b, 0xde, 0x06, 0x7a, 0x9b, 0xdd,
0x26, 0x37, 0x97, 0x5c, 0x86, 0x6e, 0xf4, 0x69, 0x1d, 0xb4, 0x20, 0x16, 0x06, 0xe8, 0xce, 0xea,
0x80, 0x7a, 0xd1, 0x81, 0xac, 0x50, 0x80, 0xa0, 0x33, 0x7a, 0x27, 0x3a, 0xc9, 0xe2, 0xf4, 0x04,
0x99, 0x03, 0xe6, 0x85, 0x0f, 0xd8, 0x4c, 0x4e, 0xb8, 0x48, 0xd4, 0x2a, 0xe5, 0x6f, 0x8c, 0x06,
0xfa, 0x25, 0xdb, 0x26, 0xd7, 0x97, 0x56, 0xca, 0x06, 0xfb, 0x79, 0x8d, 0xe8, 0xdd, 0xc8, 0x88,
0xf9, 0x9a, 0x30, 0x16, 0xd2, 0x40, 0x9a, 0xd8, 0xc3, 0xdc, 0x74, 0x23, 0x55, 0x01, 0xb2, 0x94,
0x2e, 0xad, 0xb0, 0x1f, 0xc8, 0x77, 0xa7, 0xac, 0x84, 0xb4, 0x7f, 0x8e, 0x91, 0x7e, 0x95, 0xe6,
0xf7, 0x22, 0x4f, 0x84, 0xd1, 0x2d, 0x15, 0xb7, 0x9a, 0x6e, 0x9f, 0x98, 0x34, 0x0f, 0x21, 0xca,
0x3d, 0xbd, 0x1f, 0xa3, 0x5d, 0xe1, 0x4e, 0xdf, 0x02, 0xe6, 0xd2, 0xe7, 0x3c, 0x88, 0x0e, 0x7d,
0x10, 0x97, 0xde, 0x0a, 0x1f, 0x8b, 0x66, 0x7a, 0x5f, 0xce, 0xc6, 0xc3, 0xe3, 0xef, 0xe9, 0x71,
0xa5, 0x15, 0xf4, 0x11, 0xab, 0x93, 0xe7, 0x27, 0xb3, 0x4d, 0x59, 0xa6, 0x04, 0xf2, 0x42, 0x05,
0x69, 0x15, 0x60, 0xce, 0xad, 0x4d, 0xf1, 0x3c, 0x66, 0x55, 0xf2, 0xf4, 0x34, 0x95, 0x97, 0xd0,
0x47, 0x1e, 0x82, 0x93, 0xcd, 0x22, 0x80, 0xa7, 0x4f, 0x8e, 0x2d, 0xd5, 0xc4, 0xa2, 0xf9, 0x3a,
0x7a, 0x1a, 0x97, 0x9a, 0x75, 0x26, 0x37, 0x69, 0xaf, 0x2f, 0x30, 0xb1, 0x51, 0xf4, 0xab, 0x48,
0xcd, 0x25, 0xbf, 0x32, 0x10, 0x32, 0xe7, 0x0a, 0xad, 0xe2, 0x02, 0x3c, 0xbe, 0x3f, 0x32, 0xcf,
0x9a, 0xff, 0x9e, 0x21, 0x95, 0x9f, 0x27, 0x07, 0xd5, 0xd3, 0x8f, 0x5e, 0x73, 0x7b, 0xfd, 0xd1,
0xb2, 0xf1, 0xe6, 0xd9, 0x33, 0x6f, 0xb2, 0xb9, 0xfa, 0x68, 0xf2, 0x76, 0x6f, 0x3c, 0xaa, 0x4e,
0x8e, 0x46, 0xb5, 0xd1, 0x70, 0x9c, 0x2e, 0xe2, 0xe2, 0x06, 0x1f, 0xee, 0x4f, 0x3f, 0x74, 0x92,
0xbf, 0x2f, 0x7f, 0xfe, 0xd8, 0x38, 0xdb, 0xe6, 0xfc, 0xcf, 0x8d, 0x9d, 0x76, 0x69, 0x8c, 0x0f,
0xa6, 0xd5, 0xf2, 0x33, 0x7e, 0x75, 0xeb, 0xd5, 0xe4, 0x72, 0xfa, 0xf7, 0x02, 0xb0, 0xcb, 0x07,
0xd3, 0xdd, 0x25, 0x60, 0xb7, 0x5b, 0xdf, 0x2d, 0x01, 0xff, 0x6c, 0x54, 0x4a, 0x69, 0xa3, 0xc1,
0x07, 0xd3, 0x46, 0x63, 0x09, 0x69, 0x34, 0xba, 0xf5, 0x46, 0xa3, 0x04, 0xfd, 0x74, 0x2e, 0x45,
0xf7, 0xcd, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x4a, 0x95, 0x61, 0x9f, 0x2f, 0x08, 0x00, 0x00,
}
|
// Inconsistent currency codes.
ExtensionFeedItemErrorEnum_INCONSISTENT_CURRENCY_CODES ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 23
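A minimal usage sketch for the generated enum above: the generated String() method simply resolves a numeric code through the ExtensionFeedItemErrorEnum_ExtensionFeedItemError_name map shown in the file. The import path below is an assumption (the v1 googleads error protos were historically published under google.golang.org/genproto); adjust it to wherever this package is actually vendored.

package main

import (
	"fmt"

	// Assumed import path; adjust to your vendored copy of the v1 protos.
	errors "google.golang.org/genproto/googleapis/ads/googleads/v1/errors"
)

func main() {
	// String() looks the code up in the generated _name map.
	code := errors.ExtensionFeedItemErrorEnum_INVALID_PHONE_NUMBER
	fmt.Printf("extension feed item error %d: %s\n", code, code)
	// Prints: extension feed item error 7: INVALID_PHONE_NUMBER
}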
|
main.ts
|
import { NestFactory } from '@nestjs/core';
import { AppModule } from './app.module';
import { ConfigService } from '@nestjs/config';
import { ValidationPipe } from '@nestjs/common';
async function bootstrap() {
const app = await NestFactory.create(AppModule);
const configService = app.get(ConfigService);
app.enableCors();
|
})
);
const port = process.env.PORT || configService.get('PORT');
await app.listen(port);
console.log(`Server listening on port ${port}`);
}
bootstrap();
|
app.useGlobalPipes(
new ValidationPipe({
disableErrorMessages: true,
|
test_live_photos.py
|
import onfido
from onfido.regions import Region
import io
api = onfido.Api("<AN_API_TOKEN>", region=Region.EU)
fake_uuid = "58a9c6d2-8661-4dbd-96dc-b9b9d344a7ce"
def test_upload_photo(requests_mock):
mock_upload = requests_mock.post("https://api.eu.onfido.com/v3.2/live_photos/", json=[])
sample_file = open("sample_photo.png", "rb")
request_body = {"advanced_validation": "true"}
api.live_photo.upload(sample_file, request_body)
assert mock_upload.called is True
def test_find_live_photo(requests_mock):
|
def test_list_live_photos(requests_mock):
mock_list = requests_mock.get(f"https://api.eu.onfido.com/v3.2/live_photos/?applicant_id={fake_uuid}", json=[])
api.live_photo.all(fake_uuid)
assert mock_list.called is True
def test_download_live_photo(requests_mock):
mock_download = requests_mock.get(f"https://api.eu.onfido.com/v3.2/live_photos/{fake_uuid}/download", text="FAKE IMAGE BINARY", headers={"Content-type": "image/png"})
onfido_download = api.live_photo.download(fake_uuid)
assert mock_download.called is True
assert onfido_download.content_type == "image/png"
|
mock_find = requests_mock.get(f"https://api.eu.onfido.com/v3.2/live_photos/{fake_uuid}", json=[])
api.live_photo.find(fake_uuid)
assert mock_find.called is True
|
lib.rs
|
mod error;
mod generator;
pub use self::error::Error;
|
pub use self::generator::Generator;
|
|
time.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use fmt;
use time::Duration;
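// Stub implementations for the WebAssembly target: there is no clock source
// available here, so every time operation below panics at runtime.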
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub struct Instant;
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct SystemTime;
pub const UNIX_EPOCH: SystemTime = SystemTime;
impl Instant {
pub fn now() -> Instant {
panic!("not supported on web assembly");
}
pub fn sub_instant(&self, _other: &Instant) -> Duration {
panic!("can't sub yet");
}
pub fn add_duration(&self, _other: &Duration) -> Instant {
panic!("can't add yet");
}
pub fn sub_duration(&self, _other: &Duration) -> Instant {
panic!("can't sub yet");
}
}
impl SystemTime {
pub fn now() -> SystemTime {
panic!("not supported on web assembly");
}
pub fn sub_time(&self, _other: &SystemTime)
-> Result<Duration, Duration> {
panic!()
}
pub fn add_duration(&self, _other: &Duration) -> SystemTime {
panic!()
}
pub fn sub_duration(&self, _other: &Duration) -> SystemTime {
panic!()
}
}
impl fmt::Debug for SystemTime {
fn
|
(&self, _f: &mut fmt::Formatter) -> fmt::Result {
panic!()
}
}
|
fmt
|
_mouse_helper.py
|
from playwright.sync_api import Page
from pathlib import Path
def install_mouse_helper(page: Page) -> None:
|
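# Register the mouseHelper.js script that ships next to this helper as an init
# script, so it is evaluated in every page and frame before the page's own
# scripts run.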
page.add_init_script(path=Path(__file__).parent.joinpath("../js/mouseHelper.js"))
|
|
index.d.ts
|
declare module '*.svg'
declare module '*.png'
declare module '*.jpg'
declare module '*.jpeg'
declare module '*.gif'
declare module '*.bmp'
declare module '*.tiff'
declare module '*.yaml'
declare module '*.json'
declare module 'vue-count-to'
|
/*
 * @Description: Project type declarations
 * @Author:
 */
|
|
test_groupby.py
|
from datetime import datetime
from decimal import Decimal
from io import StringIO
import numpy as np
import pytest
from pandas.compat import IS64
from pandas.errors import PerformanceWarning
import pandas as pd
from pandas import (
Categorical,
DataFrame,
Grouper,
Index,
MultiIndex,
Series,
Timestamp,
date_range,
read_csv,
to_datetime,
)
import pandas._testing as tm
from pandas.core.base import SpecificationError
import pandas.core.common as com
def test_repr():
# GH18203
result = repr(Grouper(key="A", level="B"))
expected = "Grouper(key='A', level='B', axis=0, sort=False)"
assert result == expected
@pytest.mark.parametrize("dtype", ["int64", "int32", "float64", "float32"])
def test_basic(dtype):
data = Series(np.arange(9) // 3, index=np.arange(9), dtype=dtype)
index = np.arange(9)
np.random.shuffle(index)
data = data.reindex(index)
grouped = data.groupby(lambda x: x // 3)
for k, v in grouped:
assert len(v) == 3
agged = grouped.aggregate(np.mean)
assert agged[1] == 1
tm.assert_series_equal(agged, grouped.agg(np.mean)) # shorthand
tm.assert_series_equal(agged, grouped.mean())
tm.assert_series_equal(grouped.agg(np.sum), grouped.sum())
expected = grouped.apply(lambda x: x * x.sum())
transformed = grouped.transform(lambda x: x * x.sum())
assert transformed[7] == 12
tm.assert_series_equal(transformed, expected)
value_grouped = data.groupby(data)
tm.assert_series_equal(
value_grouped.aggregate(np.mean), agged, check_index_type=False
)
# complex agg
agged = grouped.aggregate([np.mean, np.std])
msg = r"nested renamer is not supported"
with pytest.raises(SpecificationError, match=msg):
grouped.aggregate({"one": np.mean, "two": np.std})
group_constants = {0: 10, 1: 20, 2: 30}
agged = grouped.agg(lambda x: group_constants[x.name] + x.mean())
assert agged[1] == 21
# corner cases
msg = "Must produce aggregated value"
# exception raised is type Exception
with pytest.raises(Exception, match=msg):
grouped.aggregate(lambda x: x * 2)
def test_groupby_nonobject_dtype(mframe, df_mixed_floats):
key = mframe.index.codes[0]
grouped = mframe.groupby(key)
result = grouped.sum()
expected = mframe.groupby(key.astype("O")).sum()
tm.assert_frame_equal(result, expected)
# GH 3911, mixed frame non-conversion
df = df_mixed_floats.copy()
df["value"] = range(len(df))
def max_value(group):
return group.loc[group["value"].idxmax()]
applied = df.groupby("A").apply(max_value)
result = applied.dtypes
expected = df.dtypes
tm.assert_series_equal(result, expected)
def test_groupby_return_type():
# GH2893, return a reduced type
df1 = DataFrame(
[
{"val1": 1, "val2": 20},
{"val1": 1, "val2": 19},
{"val1": 2, "val2": 27},
{"val1": 2, "val2": 12},
]
)
def func(dataf):
return dataf["val2"] - dataf["val2"].mean()
with tm.assert_produces_warning(FutureWarning):
result = df1.groupby("val1", squeeze=True).apply(func)
assert isinstance(result, Series)
df2 = DataFrame(
[
{"val1": 1, "val2": 20},
{"val1": 1, "val2": 19},
{"val1": 1, "val2": 27},
{"val1": 1, "val2": 12},
]
)
def func(dataf):
return dataf["val2"] - dataf["val2"].mean()
with tm.assert_produces_warning(FutureWarning):
result = df2.groupby("val1", squeeze=True).apply(func)
assert isinstance(result, Series)
# GH3596, return a consistent type (regression in 0.11 from 0.10.1)
df = DataFrame([[1, 1], [1, 1]], columns=["X", "Y"])
with tm.assert_produces_warning(FutureWarning):
result = df.groupby("X", squeeze=False).count()
assert isinstance(result, DataFrame)
def test_inconsistent_return_type():
# GH5592
# inconsistent return type
df = DataFrame(
{
"A": ["Tiger", "Tiger", "Tiger", "Lamb", "Lamb", "Pony", "Pony"],
"B": Series(np.arange(7), dtype="int64"),
"C": date_range("20130101", periods=7),
}
)
def f(grp):
return grp.iloc[0]
expected = df.groupby("A").first()[["B"]]
result = df.groupby("A").apply(f)[["B"]]
tm.assert_frame_equal(result, expected)
def f(grp):
if grp.name == "Tiger":
return None
return grp.iloc[0]
result = df.groupby("A").apply(f)[["B"]]
e = expected.copy()
e.loc["Tiger"] = np.nan
tm.assert_frame_equal(result, e)
def f(grp):
if grp.name == "Pony":
return None
return grp.iloc[0]
result = df.groupby("A").apply(f)[["B"]]
e = expected.copy()
e.loc["Pony"] = np.nan
tm.assert_frame_equal(result, e)
# 5592 revisited, with datetimes
def f(grp):
if grp.name == "Pony":
return None
return grp.iloc[0]
result = df.groupby("A").apply(f)[["C"]]
e = df.groupby("A").first()[["C"]]
e.loc["Pony"] = pd.NaT
tm.assert_frame_equal(result, e)
# scalar outputs
def f(grp):
if grp.name == "Pony":
return None
return grp.iloc[0].loc["C"]
result = df.groupby("A").apply(f)
e = df.groupby("A").first()["C"].copy()
e.loc["Pony"] = np.nan
e.name = None
tm.assert_series_equal(result, e)
def test_pass_args_kwargs(ts, tsframe):
def f(x, q=None, axis=0):
return np.percentile(x, q, axis=axis)
g = lambda x: np.percentile(x, 80, axis=0)
# Series
ts_grouped = ts.groupby(lambda x: x.month)
agg_result = ts_grouped.agg(np.percentile, 80, axis=0)
apply_result = ts_grouped.apply(np.percentile, 80, axis=0)
trans_result = ts_grouped.transform(np.percentile, 80, axis=0)
agg_expected = ts_grouped.quantile(0.8)
trans_expected = ts_grouped.transform(g)
tm.assert_series_equal(apply_result, agg_expected)
tm.assert_series_equal(agg_result, agg_expected)
tm.assert_series_equal(trans_result, trans_expected)
agg_result = ts_grouped.agg(f, q=80)
apply_result = ts_grouped.apply(f, q=80)
trans_result = ts_grouped.transform(f, q=80)
tm.assert_series_equal(agg_result, agg_expected)
tm.assert_series_equal(apply_result, agg_expected)
tm.assert_series_equal(trans_result, trans_expected)
# DataFrame
df_grouped = tsframe.groupby(lambda x: x.month)
agg_result = df_grouped.agg(np.percentile, 80, axis=0)
apply_result = df_grouped.apply(DataFrame.quantile, 0.8)
expected = df_grouped.quantile(0.8)
tm.assert_frame_equal(apply_result, expected, check_names=False)
tm.assert_frame_equal(agg_result, expected)
agg_result = df_grouped.agg(f, q=80)
apply_result = df_grouped.apply(DataFrame.quantile, q=0.8)
tm.assert_frame_equal(agg_result, expected)
tm.assert_frame_equal(apply_result, expected, check_names=False)
def test_len():
df = tm.makeTimeDataFrame()
grouped = df.groupby([lambda x: x.year, lambda x: x.month, lambda x: x.day])
assert len(grouped) == len(df)
grouped = df.groupby([lambda x: x.year, lambda x: x.month])
expected = len({(x.year, x.month) for x in df.index})
assert len(grouped) == expected
# issue 11016
df = DataFrame({"a": [np.nan] * 3, "b": [1, 2, 3]})
assert len(df.groupby("a")) == 0
assert len(df.groupby("b")) == 3
assert len(df.groupby(["a", "b"])) == 3
def test_basic_regression():
# regression
result = Series([1.0 * x for x in list(range(1, 10)) * 10])
data = np.random.random(1100) * 10.0
groupings = Series(data)
grouped = result.groupby(groupings)
grouped.mean()
@pytest.mark.parametrize(
"dtype", ["float64", "float32", "int64", "int32", "int16", "int8"]
)
def test_with_na_groups(dtype):
index = Index(np.arange(10))
values = Series(np.ones(10), index, dtype=dtype)
labels = Series(
[np.nan, "foo", "bar", "bar", np.nan, np.nan, "bar", "bar", np.nan, "foo"],
index=index,
)
# this SHOULD be an int
grouped = values.groupby(labels)
agged = grouped.agg(len)
expected = Series([4, 2], index=["bar", "foo"])
tm.assert_series_equal(agged, expected, check_dtype=False)
# assert issubclass(agged.dtype.type, np.integer)
# explicitly return a float from my function
def f(x):
return float(len(x))
agged = grouped.agg(f)
expected = Series([4.0, 2.0], index=["bar", "foo"])
tm.assert_series_equal(agged, expected)
def test_indices_concatenation_order():
# GH 2808
def f1(x):
y = x[(x.b % 2) == 1] ** 2
if y.empty:
multiindex = MultiIndex(levels=[[]] * 2, codes=[[]] * 2, names=["b", "c"])
res = DataFrame(columns=["a"], index=multiindex)
return res
else:
y = y.set_index(["b", "c"])
return y
def f2(x):
y = x[(x.b % 2) == 1] ** 2
if y.empty:
return DataFrame()
else:
y = y.set_index(["b", "c"])
return y
def f3(x):
y = x[(x.b % 2) == 1] ** 2
if y.empty:
multiindex = MultiIndex(
levels=[[]] * 2, codes=[[]] * 2, names=["foo", "bar"]
)
res = DataFrame(columns=["a", "b"], index=multiindex)
return res
else:
return y
df = DataFrame({"a": [1, 2, 2, 2], "b": range(4), "c": range(5, 9)})
df2 = DataFrame({"a": [3, 2, 2, 2], "b": range(4), "c": range(5, 9)})
# correct result
result1 = df.groupby("a").apply(f1)
result2 = df2.groupby("a").apply(f1)
tm.assert_frame_equal(result1, result2)
# should fail (not the same number of levels)
msg = "Cannot concat indices that do not have the same number of levels"
with pytest.raises(AssertionError, match=msg):
df.groupby("a").apply(f2)
with pytest.raises(AssertionError, match=msg):
df2.groupby("a").apply(f2)
# should fail (incorrect shape)
with pytest.raises(AssertionError, match=msg):
df.groupby("a").apply(f3)
with pytest.raises(AssertionError, match=msg):
df2.groupby("a").apply(f3)
def test_attr_wrapper(ts):
grouped = ts.groupby(lambda x: x.weekday())
result = grouped.std()
expected = grouped.agg(lambda x: np.std(x, ddof=1))
tm.assert_series_equal(result, expected)
# this is pretty cool
result = grouped.describe()
expected = {name: gp.describe() for name, gp in grouped}
expected = DataFrame(expected).T
tm.assert_frame_equal(result, expected)
# get attribute
result = grouped.dtype
expected = grouped.agg(lambda x: x.dtype)
tm.assert_series_equal(result, expected)
# make sure raises error
msg = "'SeriesGroupBy' object has no attribute 'foo'"
with pytest.raises(AttributeError, match=msg):
getattr(grouped, "foo")
def test_frame_groupby(tsframe):
grouped = tsframe.groupby(lambda x: x.weekday())
# aggregate
aggregated = grouped.aggregate(np.mean)
assert len(aggregated) == 5
assert len(aggregated.columns) == 4
# by string
tscopy = tsframe.copy()
tscopy["weekday"] = [x.weekday() for x in tscopy.index]
stragged = tscopy.groupby("weekday").aggregate(np.mean)
tm.assert_frame_equal(stragged, aggregated, check_names=False)
# transform
grouped = tsframe.head(30).groupby(lambda x: x.weekday())
transformed = grouped.transform(lambda x: x - x.mean())
assert len(transformed) == 30
assert len(transformed.columns) == 4
# transform propagate
transformed = grouped.transform(lambda x: x.mean())
for name, group in grouped:
mean = group.mean()
for idx in group.index:
tm.assert_series_equal(transformed.xs(idx), mean, check_names=False)
# iterate
for weekday, group in grouped:
assert group.index[0].weekday() == weekday
# groups / group_indices
groups = grouped.groups
indices = grouped.indices
for k, v in groups.items():
samething = tsframe.index.take(indices[k])
assert (samething == v).all()
def test_frame_groupby_columns(tsframe):
mapping = {"A": 0, "B": 0, "C": 1, "D": 1}
grouped = tsframe.groupby(mapping, axis=1)
# aggregate
aggregated = grouped.aggregate(np.mean)
assert len(aggregated) == len(tsframe)
assert len(aggregated.columns) == 2
# transform
tf = lambda x: x - x.mean()
groupedT = tsframe.T.groupby(mapping, axis=0)
tm.assert_frame_equal(groupedT.transform(tf).T, grouped.transform(tf))
# iterate
for k, v in grouped:
assert len(v.columns) == 2
def test_frame_set_name_single(df):
grouped = df.groupby("A")
result = grouped.mean()
assert result.index.name == "A"
result = df.groupby("A", as_index=False).mean()
assert result.index.name != "A"
result = grouped.agg(np.mean)
assert result.index.name == "A"
result = grouped.agg({"C": np.mean, "D": np.std})
assert result.index.name == "A"
result = grouped["C"].mean()
assert result.index.name == "A"
result = grouped["C"].agg(np.mean)
assert result.index.name == "A"
result = grouped["C"].agg([np.mean, np.std])
assert result.index.name == "A"
msg = r"nested renamer is not supported"
with pytest.raises(SpecificationError, match=msg):
grouped["C"].agg({"foo": np.mean, "bar": np.std})
def test_multi_func(df):
col1 = df["A"]
col2 = df["B"]
grouped = df.groupby([col1.get, col2.get])
agged = grouped.mean()
expected = df.groupby(["A", "B"]).mean()
# TODO groupby get drops names
tm.assert_frame_equal(
agged.loc[:, ["C", "D"]], expected.loc[:, ["C", "D"]], check_names=False
)
# some "groups" with no data
df = DataFrame(
{
"v1": np.random.randn(6),
"v2": np.random.randn(6),
"k1": np.array(["b", "b", "b", "a", "a", "a"]),
"k2": np.array(["1", "1", "1", "2", "2", "2"]),
},
index=["one", "two", "three", "four", "five", "six"],
)
# only verify that it works for now
grouped = df.groupby(["k1", "k2"])
grouped.agg(np.sum)
def test_multi_key_multiple_functions(df):
grouped = df.groupby(["A", "B"])["C"]
agged = grouped.agg([np.mean, np.std])
expected = DataFrame({"mean": grouped.agg(np.mean), "std": grouped.agg(np.std)})
tm.assert_frame_equal(agged, expected)
def test_frame_multi_key_function_list():
data = DataFrame(
{
"A": [
"foo",
"foo",
"foo",
"foo",
"bar",
"bar",
"bar",
"bar",
"foo",
"foo",
"foo",
],
"B": [
"one",
"one",
"one",
"two",
"one",
"one",
"one",
"two",
"two",
"two",
"one",
],
"C": [
"dull",
"dull",
"shiny",
"dull",
"dull",
"shiny",
"shiny",
"dull",
"shiny",
"shiny",
"shiny",
],
"D": np.random.randn(11),
"E": np.random.randn(11),
"F": np.random.randn(11),
}
)
grouped = data.groupby(["A", "B"])
funcs = [np.mean, np.std]
agged = grouped.agg(funcs)
expected = pd.concat(
[grouped["D"].agg(funcs), grouped["E"].agg(funcs), grouped["F"].agg(funcs)],
keys=["D", "E", "F"],
axis=1,
)
assert isinstance(agged.index, MultiIndex)
assert isinstance(expected.index, MultiIndex)
tm.assert_frame_equal(agged, expected)
@pytest.mark.parametrize("op", [lambda x: x.sum(), lambda x: x.mean()])
def test_groupby_multiple_columns(df, op):
data = df
grouped = data.groupby(["A", "B"])
result1 = op(grouped)
keys = []
values = []
for n1, gp1 in data.groupby("A"):
for n2, gp2 in gp1.groupby("B"):
keys.append((n1, n2))
values.append(op(gp2.loc[:, ["C", "D"]]))
mi = MultiIndex.from_tuples(keys, names=["A", "B"])
expected = pd.concat(values, axis=1).T
expected.index = mi
# a little bit crude
for col in ["C", "D"]:
result_col = op(grouped[col])
pivoted = result1[col]
exp = expected[col]
tm.assert_series_equal(result_col, exp)
tm.assert_series_equal(pivoted, exp)
# test single series works the same
result = data["C"].groupby([data["A"], data["B"]]).mean()
expected = data.groupby(["A", "B"]).mean()["C"]
tm.assert_series_equal(result, expected)
def test_as_index_select_column():
# GH 5764
df = DataFrame([[1, 2], [1, 4], [5, 6]], columns=["A", "B"])
result = df.groupby("A", as_index=False)["B"].get_group(1)
expected = Series([2, 4], name="B")
tm.assert_series_equal(result, expected)
result = df.groupby("A", as_index=False)["B"].apply(lambda x: x.cumsum())
expected = Series(
[2, 6, 6], name="B", index=MultiIndex.from_tuples([(0, 0), (0, 1), (1, 2)])
)
tm.assert_series_equal(result, expected)
def test_groupby_as_index_select_column_sum_empty_df():
# GH 35246
df = DataFrame(columns=["A", "B", "C"])
left = df.groupby(by="A", as_index=False)["B"].sum()
assert type(left) is DataFrame
assert left.to_dict() == {"A": {}, "B": {}}
def test_groupby_as_index_agg(df):
grouped = df.groupby("A", as_index=False)
# single-key
result = grouped.agg(np.mean)
expected = grouped.mean()
tm.assert_frame_equal(result, expected)
result2 = grouped.agg({"C": np.mean, "D": np.sum})
expected2 = grouped.mean()
expected2["D"] = grouped.sum()["D"]
tm.assert_frame_equal(result2, expected2)
grouped = df.groupby("A", as_index=True)
msg = r"nested renamer is not supported"
with pytest.raises(SpecificationError, match=msg):
grouped["C"].agg({"Q": np.sum})
# multi-key
grouped = df.groupby(["A", "B"], as_index=False)
result = grouped.agg(np.mean)
expected = grouped.mean()
tm.assert_frame_equal(result, expected)
result2 = grouped.agg({"C": np.mean, "D": np.sum})
expected2 = grouped.mean()
expected2["D"] = grouped.sum()["D"]
tm.assert_frame_equal(result2, expected2)
expected3 = grouped["C"].sum()
expected3 = DataFrame(expected3).rename(columns={"C": "Q"})
result3 = grouped["C"].agg({"Q": np.sum})
tm.assert_frame_equal(result3, expected3)
# GH7115 & GH8112 & GH8582
df = DataFrame(np.random.randint(0, 100, (50, 3)), columns=["jim", "joe", "jolie"])
ts = Series(np.random.randint(5, 10, 50), name="jim")
gr = df.groupby(ts)
gr.nth(0) # invokes set_selection_from_grouper internally
tm.assert_frame_equal(gr.apply(sum), df.groupby(ts).apply(sum))
for attr in ["mean", "max", "count", "idxmax", "cumsum", "all"]:
gr = df.groupby(ts, as_index=False)
left = getattr(gr, attr)()
gr = df.groupby(ts.values, as_index=True)
right = getattr(gr, attr)().reset_index(drop=True)
tm.assert_frame_equal(left, right)
def test_ops_not_as_index(reduction_func):
# GH 10355, 21090
# Using as_index=False should not modify grouped column
if reduction_func in ("corrwith",):
pytest.skip("Test not applicable")
if reduction_func in ("nth", "ngroup"):
pytest.skip("Skip until behavior is determined (GH #5755)")
df = DataFrame(np.random.randint(0, 5, size=(100, 2)), columns=["a", "b"])
expected = getattr(df.groupby("a"), reduction_func)()
if reduction_func == "size":
expected = expected.rename("size")
expected = expected.reset_index()
g = df.groupby("a", as_index=False)
result = getattr(g, reduction_func)()
tm.assert_frame_equal(result, expected)
result = g.agg(reduction_func)
tm.assert_frame_equal(result, expected)
result = getattr(g["b"], reduction_func)()
tm.assert_frame_equal(result, expected)
result = g["b"].agg(reduction_func)
tm.assert_frame_equal(result, expected)
def test_as_index_series_return_frame(df):
grouped = df.groupby("A", as_index=False)
grouped2 = df.groupby(["A", "B"], as_index=False)
result = grouped["C"].agg(np.sum)
expected = grouped.agg(np.sum).loc[:, ["A", "C"]]
assert isinstance(result, DataFrame)
tm.assert_frame_equal(result, expected)
result2 = grouped2["C"].agg(np.sum)
expected2 = grouped2.agg(np.sum).loc[:, ["A", "B", "C"]]
assert isinstance(result2, DataFrame)
tm.assert_frame_equal(result2, expected2)
result = grouped["C"].sum()
expected = grouped.sum().loc[:, ["A", "C"]]
assert isinstance(result, DataFrame)
tm.assert_frame_equal(result, expected)
result2 = grouped2["C"].sum()
expected2 = grouped2.sum().loc[:, ["A", "B", "C"]]
assert isinstance(result2, DataFrame)
tm.assert_frame_equal(result2, expected2)
def test_as_index_series_column_slice_raises(df):
# GH15072
grouped = df.groupby("A", as_index=False)
msg = r"Column\(s\) C already selected"
with pytest.raises(IndexError, match=msg):
grouped["C"].__getitem__("D")
def test_groupby_as_index_cython(df):
data = df
# single-key
grouped = data.groupby("A", as_index=False)
result = grouped.mean()
expected = data.groupby(["A"]).mean()
expected.insert(0, "A", expected.index)
expected.index = np.arange(len(expected))
tm.assert_frame_equal(result, expected)
# multi-key
grouped = data.groupby(["A", "B"], as_index=False)
result = grouped.mean()
expected = data.groupby(["A", "B"]).mean()
arrays = list(zip(*expected.index.values))
expected.insert(0, "A", arrays[0])
expected.insert(1, "B", arrays[1])
expected.index = np.arange(len(expected))
tm.assert_frame_equal(result, expected)
def test_groupby_as_index_series_scalar(df):
grouped = df.groupby(["A", "B"], as_index=False)
# GH #421
result = grouped["C"].agg(len)
expected = grouped.agg(len).loc[:, ["A", "B", "C"]]
tm.assert_frame_equal(result, expected)
def test_groupby_as_index_corner(df, ts):
msg = "as_index=False only valid with DataFrame"
with pytest.raises(TypeError, match=msg):
ts.groupby(lambda x: x.weekday(), as_index=False)
msg = "as_index=False only valid for axis=0"
with pytest.raises(ValueError, match=msg):
df.groupby(lambda x: x.lower(), as_index=False, axis=1)
def test_groupby_multiple_key(df):
df = tm.makeTimeDataFrame()
grouped = df.groupby([lambda x: x.year, lambda x: x.month, lambda x: x.day])
agged = grouped.sum()
tm.assert_almost_equal(df.values, agged.values)
grouped = df.T.groupby(
[lambda x: x.year, lambda x: x.month, lambda x: x.day], axis=1
)
agged = grouped.agg(lambda x: x.sum())
tm.assert_index_equal(agged.index, df.columns)
tm.assert_almost_equal(df.T.values, agged.values)
agged = grouped.agg(lambda x: x.sum())
tm.assert_almost_equal(df.T.values, agged.values)
def test_groupby_multi_corner(df):
# test that having an all-NA column doesn't mess you up
df = df.copy()
df["bad"] = np.nan
agged = df.groupby(["A", "B"]).mean()
expected = df.groupby(["A", "B"]).mean()
expected["bad"] = np.nan
tm.assert_frame_equal(agged, expected)
def test_omit_nuisance(df):
grouped = df.groupby("A")
result = grouped.mean()
expected = df.loc[:, ["A", "C", "D"]].groupby("A").mean()
tm.assert_frame_equal(result, expected)
agged = grouped.agg(np.mean)
exp = grouped.mean()
tm.assert_frame_equal(agged, exp)
df = df.loc[:, ["A", "C", "D"]]
df["E"] = datetime.now()
grouped = df.groupby("A")
result = grouped.agg(np.sum)
expected = grouped.sum()
tm.assert_frame_equal(result, expected)
# won't work with axis = 1
grouped = df.groupby({"A": 0, "C": 0, "D": 1, "E": 1}, axis=1)
msg = "'DatetimeArray' does not implement reduction 'sum'"
with pytest.raises(TypeError, match=msg):
grouped.agg(lambda x: x.sum(0, numeric_only=False))
def test_omit_nuisance_sem(df):
# GH 38774 - sem should work with nuisance columns
grouped = df.groupby("A")
result = grouped.sem()
expected = df.loc[:, ["A", "C", "D"]].groupby("A").sem()
tm.assert_frame_equal(result, expected)
def test_omit_nuisance_python_multiple(three_group):
grouped = three_group.groupby(["A", "B"])
agged = grouped.agg(np.mean)
exp = grouped.mean()
tm.assert_frame_equal(agged, exp)
def test_empty_groups_corner(mframe):
# handle empty groups
df = DataFrame(
{
"k1": np.array(["b", "b", "b", "a", "a", "a"]),
"k2": np.array(["1", "1", "1", "2", "2", "2"]),
"k3": ["foo", "bar"] * 3,
"v1": np.random.randn(6),
"v2": np.random.randn(6),
}
)
grouped = df.groupby(["k1", "k2"])
result = grouped.agg(np.mean)
expected = grouped.mean()
tm.assert_frame_equal(result, expected)
grouped = mframe[3:5].groupby(level=0)
agged = grouped.apply(lambda x: x.mean())
agged_A = grouped["A"].apply(np.mean)
tm.assert_series_equal(agged["A"], agged_A)
assert agged.index.name == "first"
def test_nonsense_func():
df = DataFrame([0])
msg = r"unsupported operand type\(s\) for \+: 'int' and 'str'"
with pytest.raises(TypeError, match=msg):
df.groupby(lambda x: x + "foo")
def test_wrap_aggregated_output_multindex(mframe):
df = mframe.T
df["baz", "two"] = "peekaboo"
keys = [np.array([0, 0, 1]), np.array([0, 0, 1])]
agged = df.groupby(keys).agg(np.mean)
assert isinstance(agged.columns, MultiIndex)
def aggfun(ser):
if ser.name == ("foo", "one"):
raise TypeError
else:
return ser.sum()
agged2 = df.groupby(keys).aggregate(aggfun)
assert len(agged2.columns) + 1 == len(df.columns)
def test_groupby_level_apply(mframe):
result = mframe.groupby(level=0).count()
assert result.index.name == "first"
result = mframe.groupby(level=1).count()
assert result.index.name == "second"
result = mframe["A"].groupby(level=0).count()
assert result.index.name == "first"
def test_groupby_level_mapper(mframe):
deleveled = mframe.reset_index()
|
result0 = mframe.groupby(mapper0, level=0).sum()
result1 = mframe.groupby(mapper1, level=1).sum()
mapped_level0 = np.array([mapper0.get(x) for x in deleveled["first"]])
mapped_level1 = np.array([mapper1.get(x) for x in deleveled["second"]])
expected0 = mframe.groupby(mapped_level0).sum()
expected1 = mframe.groupby(mapped_level1).sum()
expected0.index.name, expected1.index.name = "first", "second"
tm.assert_frame_equal(result0, expected0)
tm.assert_frame_equal(result1, expected1)
def test_groupby_level_nonmulti():
# GH 1313, GH 13901
s = Series([1, 2, 3, 10, 4, 5, 20, 6], Index([1, 2, 3, 1, 4, 5, 2, 6], name="foo"))
expected = Series([11, 22, 3, 4, 5, 6], Index(range(1, 7), name="foo"))
result = s.groupby(level=0).sum()
tm.assert_series_equal(result, expected)
result = s.groupby(level=[0]).sum()
tm.assert_series_equal(result, expected)
result = s.groupby(level=-1).sum()
tm.assert_series_equal(result, expected)
result = s.groupby(level=[-1]).sum()
tm.assert_series_equal(result, expected)
msg = "level > 0 or level < -1 only valid with MultiIndex"
with pytest.raises(ValueError, match=msg):
s.groupby(level=1)
with pytest.raises(ValueError, match=msg):
s.groupby(level=-2)
msg = "No group keys passed!"
with pytest.raises(ValueError, match=msg):
s.groupby(level=[])
msg = "multiple levels only valid with MultiIndex"
with pytest.raises(ValueError, match=msg):
s.groupby(level=[0, 0])
with pytest.raises(ValueError, match=msg):
s.groupby(level=[0, 1])
msg = "level > 0 or level < -1 only valid with MultiIndex"
with pytest.raises(ValueError, match=msg):
s.groupby(level=[1])
def test_groupby_complex():
# GH 12902
a = Series(data=np.arange(4) * (1 + 2j), index=[0, 0, 1, 1])
expected = Series((1 + 2j, 5 + 10j))
result = a.groupby(level=0).sum()
tm.assert_series_equal(result, expected)
with tm.assert_produces_warning(FutureWarning):
result = a.sum(level=0)
tm.assert_series_equal(result, expected)
def test_groupby_series_indexed_differently():
s1 = Series(
[5.0, -9.0, 4.0, 100.0, -5.0, 55.0, 6.7],
index=Index(["a", "b", "c", "d", "e", "f", "g"]),
)
s2 = Series(
[1.0, 1.0, 4.0, 5.0, 5.0, 7.0], index=Index(["a", "b", "d", "f", "g", "h"])
)
grouped = s1.groupby(s2)
agged = grouped.mean()
exp = s1.groupby(s2.reindex(s1.index).get).mean()
tm.assert_series_equal(agged, exp)
def test_groupby_with_hier_columns():
tuples = list(
zip(
*[
["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"],
["one", "two", "one", "two", "one", "two", "one", "two"],
]
)
)
index = MultiIndex.from_tuples(tuples)
columns = MultiIndex.from_tuples(
[("A", "cat"), ("B", "dog"), ("B", "cat"), ("A", "dog")]
)
df = DataFrame(np.random.randn(8, 4), index=index, columns=columns)
result = df.groupby(level=0).mean()
tm.assert_index_equal(result.columns, columns)
result = df.groupby(level=0, axis=1).mean()
tm.assert_index_equal(result.index, df.index)
result = df.groupby(level=0).agg(np.mean)
tm.assert_index_equal(result.columns, columns)
result = df.groupby(level=0).apply(lambda x: x.mean())
tm.assert_index_equal(result.columns, columns)
result = df.groupby(level=0, axis=1).agg(lambda x: x.mean(1))
tm.assert_index_equal(result.columns, Index(["A", "B"]))
tm.assert_index_equal(result.index, df.index)
# add a nuisance column
sorted_columns, _ = columns.sortlevel(0)
df["A", "foo"] = "bar"
result = df.groupby(level=0).mean()
tm.assert_index_equal(result.columns, df.columns[:-1])
def test_grouping_ndarray(df):
grouped = df.groupby(df["A"].values)
result = grouped.sum()
expected = df.groupby("A").sum()
tm.assert_frame_equal(
result, expected, check_names=False
) # Note: no names when grouping by value
def test_groupby_wrong_multi_labels():
data = """index,foo,bar,baz,spam,data
0,foo1,bar1,baz1,spam2,20
1,foo1,bar2,baz1,spam3,30
2,foo2,bar2,baz1,spam2,40
3,foo1,bar1,baz2,spam1,50
4,foo3,bar1,baz2,spam1,60"""
data = read_csv(StringIO(data), index_col=0)
grouped = data.groupby(["foo", "bar", "baz", "spam"])
result = grouped.agg(np.mean)
expected = grouped.mean()
tm.assert_frame_equal(result, expected)
def test_groupby_series_with_name(df):
result = df.groupby(df["A"]).mean()
result2 = df.groupby(df["A"], as_index=False).mean()
assert result.index.name == "A"
assert "A" in result2
result = df.groupby([df["A"], df["B"]]).mean()
result2 = df.groupby([df["A"], df["B"]], as_index=False).mean()
assert result.index.names == ("A", "B")
assert "A" in result2
assert "B" in result2
def test_seriesgroupby_name_attr(df):
# GH 6265
result = df.groupby("A")["C"]
assert result.count().name == "C"
assert result.mean().name == "C"
testFunc = lambda x: np.sum(x) * 2
assert result.agg(testFunc).name == "C"
def test_consistency_name():
# GH 12363
df = DataFrame(
{
"A": ["foo", "bar", "foo", "bar", "foo", "bar", "foo", "foo"],
"B": ["one", "one", "two", "two", "two", "two", "one", "two"],
"C": np.random.randn(8) + 1.0,
"D": np.arange(8),
}
)
expected = df.groupby(["A"]).B.count()
result = df.B.groupby(df.A).count()
tm.assert_series_equal(result, expected)
def test_groupby_name_propagation(df):
# GH 6124
def summarize(df, name=None):
return Series({"count": 1, "mean": 2, "omissions": 3}, name=name)
def summarize_random_name(df):
# Provide a different name for each Series. In this case, groupby
# should not attempt to propagate the Series name since they are
# inconsistent.
return Series({"count": 1, "mean": 2, "omissions": 3}, name=df.iloc[0]["A"])
metrics = df.groupby("A").apply(summarize)
assert metrics.columns.name is None
metrics = df.groupby("A").apply(summarize, "metrics")
assert metrics.columns.name == "metrics"
metrics = df.groupby("A").apply(summarize_random_name)
assert metrics.columns.name is None
def test_groupby_nonstring_columns():
df = DataFrame([np.arange(10) for x in range(10)])
grouped = df.groupby(0)
result = grouped.mean()
expected = df.groupby(df[0]).mean()
tm.assert_frame_equal(result, expected)
def test_groupby_mixed_type_columns():
# GH 13432, unorderable types in py3
df = DataFrame([[0, 1, 2]], columns=["A", "B", 0])
expected = DataFrame([[1, 2]], columns=["B", 0], index=Index([0], name="A"))
result = df.groupby("A").first()
tm.assert_frame_equal(result, expected)
result = df.groupby("A").sum()
tm.assert_frame_equal(result, expected)
# TODO: Ensure warning isn't emitted in the first place
@pytest.mark.filterwarnings("ignore:Mean of:RuntimeWarning")
def test_cython_grouper_series_bug_noncontig():
arr = np.empty((100, 100))
arr.fill(np.nan)
obj = Series(arr[:, 0])
inds = np.tile(range(10), 10)
result = obj.groupby(inds).agg(Series.median)
assert result.isna().all()
def test_series_grouper_noncontig_index():
index = Index(tm.rands_array(10, 100))
values = Series(np.random.randn(50), index=index[::2])
labels = np.random.randint(0, 5, 50)
# it works!
grouped = values.groupby(labels)
# accessing the index elements causes segfault
f = lambda x: len(set(map(id, x.index)))
grouped.agg(f)
def test_convert_objects_leave_decimal_alone():
s = Series(range(5))
labels = np.array(["a", "b", "c", "d", "e"], dtype="O")
def convert_fast(x):
return Decimal(str(x.mean()))
def convert_force_pure(x):
# base will be length 0
assert len(x.values.base) > 0
return Decimal(str(x.mean()))
grouped = s.groupby(labels)
result = grouped.agg(convert_fast)
assert result.dtype == np.object_
assert isinstance(result[0], Decimal)
result = grouped.agg(convert_force_pure)
assert result.dtype == np.object_
assert isinstance(result[0], Decimal)
def test_groupby_dtype_inference_empty():
# GH 6733
df = DataFrame({"x": [], "range": np.arange(0, dtype="int64")})
assert df["x"].dtype == np.float64
result = df.groupby("x").first()
exp_index = Index([], name="x", dtype=np.float64)
expected = DataFrame({"range": Series([], index=exp_index, dtype="int64")})
tm.assert_frame_equal(result, expected, by_blocks=True)
def test_groupby_unit64_float_conversion():
# GH 30859: groupby sometimes converts uint64 to floats
df = DataFrame({"first": [1], "second": [1], "value": [16148277970000000000]})
result = df.groupby(["first", "second"])["value"].max()
expected = Series(
[16148277970000000000],
MultiIndex.from_product([[1], [1]], names=["first", "second"]),
name="value",
)
tm.assert_series_equal(result, expected)
def test_groupby_list_infer_array_like(df):
result = df.groupby(list(df["A"])).mean()
expected = df.groupby(df["A"]).mean()
tm.assert_frame_equal(result, expected, check_names=False)
with pytest.raises(KeyError, match=r"^'foo'$"):
df.groupby(list(df["A"][:-1]))
# pathological case of ambiguity
df = DataFrame({"foo": [0, 1], "bar": [3, 4], "val": np.random.randn(2)})
result = df.groupby(["foo", "bar"]).mean()
expected = df.groupby([df["foo"], df["bar"]]).mean()[["val"]]
def test_groupby_keys_same_size_as_index():
# GH 11185
freq = "s"
index = date_range(
start=Timestamp("2015-09-29T11:34:44-0700"), periods=2, freq=freq
)
df = DataFrame([["A", 10], ["B", 15]], columns=["metric", "values"], index=index)
result = df.groupby([Grouper(level=0, freq=freq), "metric"]).mean()
expected = df.set_index([df.index, "metric"])
tm.assert_frame_equal(result, expected)
def test_groupby_one_row():
# GH 11741
msg = r"^'Z'$"
df1 = DataFrame(np.random.randn(1, 4), columns=list("ABCD"))
with pytest.raises(KeyError, match=msg):
df1.groupby("Z")
df2 = DataFrame(np.random.randn(2, 4), columns=list("ABCD"))
with pytest.raises(KeyError, match=msg):
df2.groupby("Z")
def test_groupby_nat_exclude():
# GH 6992
df = DataFrame(
{
"values": np.random.randn(8),
"dt": [
np.nan,
Timestamp("2013-01-01"),
np.nan,
Timestamp("2013-02-01"),
np.nan,
Timestamp("2013-02-01"),
np.nan,
Timestamp("2013-01-01"),
],
"str": [np.nan, "a", np.nan, "a", np.nan, "a", np.nan, "b"],
}
)
grouped = df.groupby("dt")
expected = [Index([1, 7]), Index([3, 5])]
keys = sorted(grouped.groups.keys())
assert len(keys) == 2
for k, e in zip(keys, expected):
# grouped.groups keys are np.datetime64 with the system time zone;
# compare only the values so the check is not affected by the tz
tm.assert_index_equal(grouped.groups[k], e)
# confirm obj is not filtered
tm.assert_frame_equal(grouped.grouper.groupings[0].obj, df)
assert grouped.ngroups == 2
expected = {
Timestamp("2013-01-01 00:00:00"): np.array([1, 7], dtype=np.intp),
Timestamp("2013-02-01 00:00:00"): np.array([3, 5], dtype=np.intp),
}
for k in grouped.indices:
tm.assert_numpy_array_equal(grouped.indices[k], expected[k])
tm.assert_frame_equal(grouped.get_group(Timestamp("2013-01-01")), df.iloc[[1, 7]])
tm.assert_frame_equal(grouped.get_group(Timestamp("2013-02-01")), df.iloc[[3, 5]])
with pytest.raises(KeyError, match=r"^NaT$"):
grouped.get_group(pd.NaT)
nan_df = DataFrame(
{"nan": [np.nan, np.nan, np.nan], "nat": [pd.NaT, pd.NaT, pd.NaT]}
)
assert nan_df["nan"].dtype == "float64"
assert nan_df["nat"].dtype == "datetime64[ns]"
for key in ["nan", "nat"]:
grouped = nan_df.groupby(key)
assert grouped.groups == {}
assert grouped.ngroups == 0
assert grouped.indices == {}
with pytest.raises(KeyError, match=r"^nan$"):
grouped.get_group(np.nan)
with pytest.raises(KeyError, match=r"^NaT$"):
grouped.get_group(pd.NaT)
def test_groupby_two_group_keys_all_nan():
# GH #36842: Grouping over two group keys shouldn't raise an error
df = DataFrame({"a": [np.nan, np.nan], "b": [np.nan, np.nan], "c": [1, 2]})
result = df.groupby(["a", "b"]).indices
assert result == {}
def test_groupby_2d_malformed():
d = DataFrame(index=range(2))
d["group"] = ["g1", "g2"]
d["zeros"] = [0, 0]
d["ones"] = [1, 1]
d["label"] = ["l1", "l2"]
tmp = d.groupby(["group"]).mean()
res_values = np.array([[0, 1], [0, 1]], dtype=np.int64)
tm.assert_index_equal(tmp.columns, Index(["zeros", "ones"]))
tm.assert_numpy_array_equal(tmp.values, res_values)
def test_int32_overflow():
B = np.concatenate((np.arange(10000), np.arange(10000), np.arange(5000)))
A = np.arange(25000)
df = DataFrame({"A": A, "B": B, "C": A, "D": B, "E": np.random.randn(25000)})
left = df.groupby(["A", "B", "C", "D"]).sum()
right = df.groupby(["D", "C", "B", "A"]).sum()
assert len(left) == len(right)
def test_groupby_sort_multi():
df = DataFrame(
{
"a": ["foo", "bar", "baz"],
"b": [3, 2, 1],
"c": [0, 1, 2],
"d": np.random.randn(3),
}
)
tups = [tuple(row) for row in df[["a", "b", "c"]].values]
tups = com.asarray_tuplesafe(tups)
result = df.groupby(["a", "b", "c"], sort=True).sum()
tm.assert_numpy_array_equal(result.index.values, tups[[1, 2, 0]])
tups = [tuple(row) for row in df[["c", "a", "b"]].values]
tups = com.asarray_tuplesafe(tups)
result = df.groupby(["c", "a", "b"], sort=True).sum()
tm.assert_numpy_array_equal(result.index.values, tups)
tups = [tuple(x) for x in df[["b", "c", "a"]].values]
tups = com.asarray_tuplesafe(tups)
result = df.groupby(["b", "c", "a"], sort=True).sum()
tm.assert_numpy_array_equal(result.index.values, tups[[2, 1, 0]])
df = DataFrame(
{"a": [0, 1, 2, 0, 1, 2], "b": [0, 0, 0, 1, 1, 1], "d": np.random.randn(6)}
)
grouped = df.groupby(["a", "b"])["d"]
result = grouped.sum()
def _check_groupby(df, result, keys, field, f=lambda x: x.sum()):
tups = [tuple(row) for row in df[keys].values]
tups = com.asarray_tuplesafe(tups)
expected = f(df.groupby(tups)[field])
for k, v in expected.items():
assert result[k] == v
_check_groupby(df, result, ["a", "b"], "d")
def test_dont_clobber_name_column():
df = DataFrame(
{"key": ["a", "a", "a", "b", "b", "b"], "name": ["foo", "bar", "baz"] * 2}
)
result = df.groupby("key").apply(lambda x: x)
tm.assert_frame_equal(result, df)
def test_skip_group_keys():
tsf = tm.makeTimeDataFrame()
grouped = tsf.groupby(lambda x: x.month, group_keys=False)
result = grouped.apply(lambda x: x.sort_values(by="A")[:3])
pieces = [group.sort_values(by="A")[:3] for key, group in grouped]
expected = pd.concat(pieces)
tm.assert_frame_equal(result, expected)
grouped = tsf["A"].groupby(lambda x: x.month, group_keys=False)
result = grouped.apply(lambda x: x.sort_values()[:3])
pieces = [group.sort_values()[:3] for key, group in grouped]
expected = pd.concat(pieces)
tm.assert_series_equal(result, expected)
def test_no_nonsense_name(float_frame):
# GH #995
s = float_frame["C"].copy()
s.name = None
result = s.groupby(float_frame["A"]).agg(np.sum)
assert result.name is None
def test_multifunc_sum_bug():
# GH #1065
x = DataFrame(np.arange(9).reshape(3, 3))
x["test"] = 0
x["fl"] = [1.3, 1.5, 1.6]
grouped = x.groupby("test")
result = grouped.agg({"fl": "sum", 2: "size"})
assert result["fl"].dtype == np.float64
def test_handle_dict_return_value(df):
def f(group):
return {"max": group.max(), "min": group.min()}
def g(group):
return Series({"max": group.max(), "min": group.min()})
result = df.groupby("A")["C"].apply(f)
expected = df.groupby("A")["C"].apply(g)
assert isinstance(result, Series)
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize("grouper", ["A", ["A", "B"]])
def test_set_group_name(df, grouper):
def f(group):
assert group.name is not None
return group
def freduce(group):
assert group.name is not None
return group.sum()
def foo(x):
return freduce(x)
grouped = df.groupby(grouper)
# make sure all these work
grouped.apply(f)
grouped.aggregate(freduce)
grouped.aggregate({"C": freduce, "D": freduce})
grouped.transform(f)
grouped["C"].apply(f)
grouped["C"].aggregate(freduce)
grouped["C"].aggregate([freduce, foo])
grouped["C"].transform(f)
def test_group_name_available_in_inference_pass():
# gh-15062
df = DataFrame({"a": [0, 0, 1, 1, 2, 2], "b": np.arange(6)})
names = []
def f(group):
names.append(group.name)
return group.copy()
df.groupby("a", sort=False, group_keys=False).apply(f)
expected_names = [0, 1, 2]
assert names == expected_names
def test_no_dummy_key_names(df):
# see gh-1291
result = df.groupby(df["A"].values).sum()
assert result.index.name is None
result = df.groupby([df["A"].values, df["B"].values]).sum()
assert result.index.names == (None, None)
def test_groupby_sort_multiindex_series():
# series multiindex groupby sort argument was not being passed through
# _compress_group_index
# GH 9444
index = MultiIndex(
levels=[[1, 2], [1, 2]],
codes=[[0, 0, 0, 0, 1, 1], [1, 1, 0, 0, 0, 0]],
names=["a", "b"],
)
mseries = Series([0, 1, 2, 3, 4, 5], index=index)
index = MultiIndex(
levels=[[1, 2], [1, 2]], codes=[[0, 0, 1], [1, 0, 0]], names=["a", "b"]
)
mseries_result = Series([0, 2, 4], index=index)
result = mseries.groupby(level=["a", "b"], sort=False).first()
tm.assert_series_equal(result, mseries_result)
result = mseries.groupby(level=["a", "b"], sort=True).first()
tm.assert_series_equal(result, mseries_result.sort_index())
def test_groupby_reindex_inside_function():
periods = 1000
ind = date_range(start="2012/1/1", freq="5min", periods=periods)
df = DataFrame({"high": np.arange(periods), "low": np.arange(periods)}, index=ind)
def agg_before(func, fix=False):
"""
Run an aggregate func on the subset of data.
"""
def _func(data):
d = data.loc[data.index.map(lambda x: x.hour < 11)].dropna()
if fix:
data[data.index[0]]
if len(d) == 0:
return None
return func(d)
return _func
grouped = df.groupby(lambda x: datetime(x.year, x.month, x.day))
closure_bad = grouped.agg({"high": agg_before(np.max)})
closure_good = grouped.agg({"high": agg_before(np.max, True)})
tm.assert_frame_equal(closure_bad, closure_good)
def test_groupby_multiindex_missing_pair():
# GH9049
df = DataFrame(
{
"group1": ["a", "a", "a", "b"],
"group2": ["c", "c", "d", "c"],
"value": [1, 1, 1, 5],
}
)
df = df.set_index(["group1", "group2"])
df_grouped = df.groupby(level=["group1", "group2"], sort=True)
res = df_grouped.agg("sum")
idx = MultiIndex.from_tuples(
[("a", "c"), ("a", "d"), ("b", "c")], names=["group1", "group2"]
)
exp = DataFrame([[2], [1], [5]], index=idx, columns=["value"])
tm.assert_frame_equal(res, exp)
def test_groupby_multiindex_not_lexsorted():
# GH 11640
# define the lexsorted version
lexsorted_mi = MultiIndex.from_tuples(
[("a", ""), ("b1", "c1"), ("b2", "c2")], names=["b", "c"]
)
lexsorted_df = DataFrame([[1, 3, 4]], columns=lexsorted_mi)
assert lexsorted_df.columns._is_lexsorted()
# define the non-lexsorted version
not_lexsorted_df = DataFrame(
columns=["a", "b", "c", "d"], data=[[1, "b1", "c1", 3], [1, "b2", "c2", 4]]
)
not_lexsorted_df = not_lexsorted_df.pivot_table(
index="a", columns=["b", "c"], values="d"
)
not_lexsorted_df = not_lexsorted_df.reset_index()
assert not not_lexsorted_df.columns._is_lexsorted()
# compare the results
tm.assert_frame_equal(lexsorted_df, not_lexsorted_df)
expected = lexsorted_df.groupby("a").mean()
with tm.assert_produces_warning(PerformanceWarning):
result = not_lexsorted_df.groupby("a").mean()
tm.assert_frame_equal(expected, result)
# a transforming function should work regardless of sort
# GH 14776
df = DataFrame(
{"x": ["a", "a", "b", "a"], "y": [1, 1, 2, 2], "z": [1, 2, 3, 4]}
).set_index(["x", "y"])
assert not df.index._is_lexsorted()
for level in [0, 1, [0, 1]]:
for sort in [False, True]:
result = df.groupby(level=level, sort=sort).apply(DataFrame.drop_duplicates)
expected = df
tm.assert_frame_equal(expected, result)
result = (
df.sort_index()
.groupby(level=level, sort=sort)
.apply(DataFrame.drop_duplicates)
)
expected = df.sort_index()
tm.assert_frame_equal(expected, result)
def test_index_label_overlaps_location():
# checking we don't have any label/location confusion in the
# wake of GH5375
df = DataFrame(list("ABCDE"), index=[2, 0, 2, 1, 1])
g = df.groupby(list("ababb"))
actual = g.filter(lambda x: len(x) > 2)
expected = df.iloc[[1, 3, 4]]
tm.assert_frame_equal(actual, expected)
ser = df[0]
g = ser.groupby(list("ababb"))
actual = g.filter(lambda x: len(x) > 2)
expected = ser.take([1, 3, 4])
tm.assert_series_equal(actual, expected)
# ... and again, with a generic Index of floats
df.index = df.index.astype(float)
g = df.groupby(list("ababb"))
actual = g.filter(lambda x: len(x) > 2)
expected = df.iloc[[1, 3, 4]]
tm.assert_frame_equal(actual, expected)
ser = df[0]
g = ser.groupby(list("ababb"))
actual = g.filter(lambda x: len(x) > 2)
expected = ser.take([1, 3, 4])
tm.assert_series_equal(actual, expected)
def test_transform_doesnt_clobber_ints():
# GH 7972
n = 6
x = np.arange(n)
df = DataFrame({"a": x // 2, "b": 2.0 * x, "c": 3.0 * x})
df2 = DataFrame({"a": x // 2 * 1.0, "b": 2.0 * x, "c": 3.0 * x})
gb = df.groupby("a")
result = gb.transform("mean")
gb2 = df2.groupby("a")
expected = gb2.transform("mean")
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
"sort_column",
["ints", "floats", "strings", ["ints", "floats"], ["ints", "strings"]],
)
@pytest.mark.parametrize(
"group_column", ["int_groups", "string_groups", ["int_groups", "string_groups"]]
)
def test_groupby_preserves_sort(sort_column, group_column):
# Test to ensure that groupby always preserves sort order of original
# object. Issue #8588 and #9651
df = DataFrame(
{
"int_groups": [3, 1, 0, 1, 0, 3, 3, 3],
"string_groups": ["z", "a", "z", "a", "a", "g", "g", "g"],
"ints": [8, 7, 4, 5, 2, 9, 1, 1],
"floats": [2.3, 5.3, 6.2, -2.4, 2.2, 1.1, 1.1, 5],
"strings": ["z", "d", "a", "e", "word", "word2", "42", "47"],
}
)
# Try sorting on different types and with different group types
df = df.sort_values(by=sort_column)
g = df.groupby(group_column)
def test_sort(x):
tm.assert_frame_equal(x, x.sort_values(by=sort_column))
g.apply(test_sort)
def test_pivot_table_values_key_error():
# This test is designed to replicate the error in issue #14938
df = DataFrame(
{
"eventDate": date_range(datetime.today(), periods=20, freq="M").tolist(),
"thename": range(0, 20),
}
)
df["year"] = df.set_index("eventDate").index.year
df["month"] = df.set_index("eventDate").index.month
with pytest.raises(KeyError, match="'badname'"):
df.reset_index().pivot_table(
index="year", columns="month", values="badname", aggfunc="count"
)
@pytest.mark.parametrize("columns", ["C", ["C"]])
@pytest.mark.parametrize("keys", [["A"], ["A", "B"]])
@pytest.mark.parametrize(
"values",
[
[True],
[0],
[0.0],
["a"],
Categorical([0]),
[to_datetime(0)],
date_range(0, 1, 1, tz="US/Eastern"),
pd.array([0], dtype="Int64"),
pd.array([0], dtype="Float64"),
pd.array([False], dtype="boolean"),
],
)
@pytest.mark.parametrize("method", ["attr", "agg", "apply"])
@pytest.mark.parametrize(
"op", ["idxmax", "idxmin", "mad", "min", "max", "sum", "prod", "skew"]
)
def test_empty_groupby(columns, keys, values, method, op, request):
# GH8093 & GH26411
if isinstance(values, Categorical) and len(keys) == 1 and method == "apply":
mark = pytest.mark.xfail(raises=TypeError, match="'str' object is not callable")
request.node.add_marker(mark)
elif (
isinstance(values, Categorical)
and len(keys) == 1
and op in ["idxmax", "idxmin"]
):
mark = pytest.mark.xfail(
raises=ValueError, match="attempt to get arg(min|max) of an empty sequence"
)
request.node.add_marker(mark)
elif (
isinstance(values, Categorical)
and len(keys) == 1
and not isinstance(columns, list)
):
mark = pytest.mark.xfail(
raises=TypeError, match="'Categorical' does not implement"
)
request.node.add_marker(mark)
elif (
isinstance(values, Categorical)
and len(keys) == 1
and op in ["mad", "min", "max", "sum", "prod", "skew"]
):
mark = pytest.mark.xfail(
raises=AssertionError, match="(DataFrame|Series) are different"
)
request.node.add_marker(mark)
elif (
isinstance(values, Categorical)
and len(keys) == 2
and op in ["min", "max", "sum"]
and method != "apply"
):
mark = pytest.mark.xfail(
raises=AssertionError, match="(DataFrame|Series) are different"
)
request.node.add_marker(mark)
elif (
isinstance(values, pd.core.arrays.BooleanArray)
and op in ["sum", "prod"]
and method != "apply"
):
mark = pytest.mark.xfail(
raises=AssertionError, match="(DataFrame|Series) are different"
)
request.node.add_marker(mark)
override_dtype = None
if isinstance(values[0], bool) and op in ("prod", "sum") and method != "apply":
# sum/product of bools is an integer
override_dtype = "int64"
df = DataFrame({"A": values, "B": values, "C": values}, columns=list("ABC"))
if hasattr(values, "dtype"):
# check that we did the construction right
assert (df.dtypes == values.dtype).all()
df = df.iloc[:0]
gb = df.groupby(keys)[columns]
if method == "attr":
result = getattr(gb, op)()
else:
result = getattr(gb, method)(op)
expected = df.set_index(keys)[columns]
if override_dtype is not None:
expected = expected.astype(override_dtype)
if len(keys) == 1:
expected.index.name = keys[0]
tm.assert_equal(result, expected)
def test_tuple_as_grouping():
# https://github.com/pandas-dev/pandas/issues/18314
df = DataFrame(
{
("a", "b"): [1, 1, 1, 1],
"a": [2, 2, 2, 2],
"b": [2, 2, 2, 2],
"c": [1, 1, 1, 1],
}
)
with pytest.raises(KeyError, match=r"('a', 'b')"):
df[["a", "b", "c"]].groupby(("a", "b"))
result = df.groupby(("a", "b"))["c"].sum()
expected = Series([4], name="c", index=Index([1], name=("a", "b")))
tm.assert_series_equal(result, expected)
def test_tuple_correct_keyerror():
# https://github.com/pandas-dev/pandas/issues/18798
df = DataFrame(1, index=range(3), columns=MultiIndex.from_product([[1, 2], [3, 4]]))
with pytest.raises(KeyError, match=r"^\(7, 8\)$"):
df.groupby((7, 8)).mean()
def test_groupby_agg_ohlc_non_first():
# GH 21716
df = DataFrame(
[[1], [1]],
columns=["foo"],
index=date_range("2018-01-01", periods=2, freq="D"),
)
expected = DataFrame(
[[1, 1, 1, 1, 1], [1, 1, 1, 1, 1]],
columns=MultiIndex.from_tuples(
(
("foo", "sum", "foo"),
("foo", "ohlc", "open"),
("foo", "ohlc", "high"),
("foo", "ohlc", "low"),
("foo", "ohlc", "close"),
)
),
index=date_range("2018-01-01", periods=2, freq="D"),
)
result = df.groupby(Grouper(freq="D")).agg(["sum", "ohlc"])
tm.assert_frame_equal(result, expected)
def test_groupby_multiindex_nat():
# GH 9236
values = [
(pd.NaT, "a"),
(datetime(2012, 1, 2), "a"),
(datetime(2012, 1, 2), "b"),
(datetime(2012, 1, 3), "a"),
]
mi = MultiIndex.from_tuples(values, names=["date", None])
ser = Series([3, 2, 2.5, 4], index=mi)
result = ser.groupby(level=1).mean()
expected = Series([3.0, 2.5], index=["a", "b"])
tm.assert_series_equal(result, expected)
def test_groupby_empty_list_raises():
# GH 5289
values = zip(range(10), range(10))
df = DataFrame(values, columns=["apple", "b"])
msg = "Grouper and axis must be same length"
with pytest.raises(ValueError, match=msg):
df.groupby([[]])
def test_groupby_multiindex_series_keys_len_equal_group_axis():
# GH 25704
index_array = [["x", "x"], ["a", "b"], ["k", "k"]]
index_names = ["first", "second", "third"]
ri = MultiIndex.from_arrays(index_array, names=index_names)
s = Series(data=[1, 2], index=ri)
result = s.groupby(["first", "third"]).sum()
index_array = [["x"], ["k"]]
index_names = ["first", "third"]
ei = MultiIndex.from_arrays(index_array, names=index_names)
expected = Series([3], index=ei)
tm.assert_series_equal(result, expected)
def test_groupby_groups_in_BaseGrouper():
# GH 26326
# Test if DataFrame grouped with a pandas.Grouper has correct groups
mi = MultiIndex.from_product([["A", "B"], ["C", "D"]], names=["alpha", "beta"])
df = DataFrame({"foo": [1, 2, 1, 2], "bar": [1, 2, 3, 4]}, index=mi)
result = df.groupby([Grouper(level="alpha"), "beta"])
expected = df.groupby(["alpha", "beta"])
assert result.groups == expected.groups
result = df.groupby(["beta", Grouper(level="alpha")])
expected = df.groupby(["beta", "alpha"])
assert result.groups == expected.groups
@pytest.mark.parametrize("group_name", ["x", ["x"]])
def test_groupby_axis_1(group_name):
# GH 27614
df = DataFrame(
np.arange(12).reshape(3, 4), index=[0, 1, 0], columns=[10, 20, 10, 20]
)
df.index.name = "y"
df.columns.name = "x"
results = df.groupby(group_name, axis=1).sum()
expected = df.T.groupby(group_name).sum().T
tm.assert_frame_equal(results, expected)
# test on MI column
iterables = [["bar", "baz", "foo"], ["one", "two"]]
mi = MultiIndex.from_product(iterables=iterables, names=["x", "x1"])
df = DataFrame(np.arange(18).reshape(3, 6), index=[0, 1, 0], columns=mi)
results = df.groupby(group_name, axis=1).sum()
expected = df.T.groupby(group_name).sum().T
tm.assert_frame_equal(results, expected)
@pytest.mark.parametrize(
"op, expected",
[
(
"shift",
{
"time": [
None,
None,
Timestamp("2019-01-01 12:00:00"),
Timestamp("2019-01-01 12:30:00"),
None,
None,
]
},
),
(
"bfill",
{
"time": [
Timestamp("2019-01-01 12:00:00"),
Timestamp("2019-01-01 12:30:00"),
Timestamp("2019-01-01 14:00:00"),
Timestamp("2019-01-01 14:30:00"),
Timestamp("2019-01-01 14:00:00"),
Timestamp("2019-01-01 14:30:00"),
]
},
),
(
"ffill",
{
"time": [
Timestamp("2019-01-01 12:00:00"),
Timestamp("2019-01-01 12:30:00"),
Timestamp("2019-01-01 12:00:00"),
Timestamp("2019-01-01 12:30:00"),
Timestamp("2019-01-01 14:00:00"),
Timestamp("2019-01-01 14:30:00"),
]
},
),
],
)
def test_shift_bfill_ffill_tz(tz_naive_fixture, op, expected):
# GH19995, GH27992: Check that timezone does not drop in shift, bfill, and ffill
tz = tz_naive_fixture
data = {
"id": ["A", "B", "A", "B", "A", "B"],
"time": [
Timestamp("2019-01-01 12:00:00"),
Timestamp("2019-01-01 12:30:00"),
None,
None,
Timestamp("2019-01-01 14:00:00"),
Timestamp("2019-01-01 14:30:00"),
],
}
df = DataFrame(data).assign(time=lambda x: x.time.dt.tz_localize(tz))
grouped = df.groupby("id")
result = getattr(grouped, op)()
expected = DataFrame(expected).assign(time=lambda x: x.time.dt.tz_localize(tz))
tm.assert_frame_equal(result, expected)
def test_groupby_only_none_group():
# see GH21624
# this was crashing with "ValueError: Length of passed values is 1, index implies 0"
df = DataFrame({"g": [None], "x": 1})
actual = df.groupby("g")["x"].transform("sum")
expected = Series([np.nan], name="x")
tm.assert_series_equal(actual, expected)
def test_groupby_duplicate_index():
# GH#29189 the groupby call here used to raise
ser = Series([2, 5, 6, 8], index=[2.0, 4.0, 4.0, 5.0])
gb = ser.groupby(level=0)
result = gb.mean()
expected = Series([2, 5.5, 8], index=[2.0, 4.0, 5.0])
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize(
"idx", [Index(["a", "a"]), MultiIndex.from_tuples((("a", "a"), ("a", "a")))]
)
@pytest.mark.filterwarnings("ignore:tshift is deprecated:FutureWarning")
def test_dup_labels_output_shape(groupby_func, idx):
if groupby_func in {"size", "ngroup", "cumcount"}:
pytest.skip("Not applicable")
df = DataFrame([[1, 1]], columns=idx)
grp_by = df.groupby([0])
args = []
if groupby_func in {"fillna", "nth"}:
args.append(0)
elif groupby_func == "corrwith":
args.append(df)
elif groupby_func == "tshift":
df.index = [Timestamp("today")]
args.extend([1, "D"])
result = getattr(grp_by, groupby_func)(*args)
assert result.shape == (1, 2)
tm.assert_index_equal(result.columns, idx)
def test_groupby_crash_on_nunique(axis):
# Fix following 30253
df = DataFrame({("A", "B"): [1, 2], ("A", "C"): [1, 3], ("D", "B"): [0, 0]})
axis_number = df._get_axis_number(axis)
if not axis_number:
df = df.T
result = df.groupby(axis=axis_number, level=0).nunique()
expected = DataFrame({"A": [1, 2], "D": [1, 1]})
if not axis_number:
expected = expected.T
tm.assert_frame_equal(result, expected)
# same thing, but empty columns
gb = df[[]].groupby(axis=axis_number, level=0)
res = gb.nunique()
exp = expected[[]]
tm.assert_frame_equal(res, exp)
def test_groupby_list_level():
# GH 9790
expected = DataFrame(np.arange(0, 9).reshape(3, 3))
result = expected.groupby(level=[0]).mean()
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
"max_seq_items, expected",
[
(5, "{0: [0], 1: [1], 2: [2], 3: [3], 4: [4]}"),
(4, "{0: [0], 1: [1], 2: [2], 3: [3], ...}"),
(1, "{0: [0], ...}"),
],
)
def test_groups_repr_truncates(max_seq_items, expected):
# GH 1135
df = DataFrame(np.random.randn(5, 1))
df["a"] = df.index
with pd.option_context("display.max_seq_items", max_seq_items):
result = df.groupby("a").groups.__repr__()
assert result == expected
result = df.groupby(np.array(df.a)).groups.__repr__()
assert result == expected
def test_group_on_two_row_multiindex_returns_one_tuple_key():
# GH 18451
df = DataFrame([{"a": 1, "b": 2, "c": 99}, {"a": 1, "b": 2, "c": 88}])
df = df.set_index(["a", "b"])
grp = df.groupby(["a", "b"])
result = grp.indices
expected = {(1, 2): np.array([0, 1], dtype=np.int64)}
assert len(result) == 1
key = (1, 2)
assert (result[key] == expected[key]).all()
@pytest.mark.parametrize(
"klass, attr, value",
[
(DataFrame, "level", "a"),
(DataFrame, "as_index", False),
(DataFrame, "sort", False),
(DataFrame, "group_keys", False),
(DataFrame, "squeeze", True),
(DataFrame, "observed", True),
(DataFrame, "dropna", False),
pytest.param(
Series,
"axis",
1,
marks=pytest.mark.xfail(
reason="GH 35443: Attribute currently not passed on to series"
),
),
(Series, "level", "a"),
(Series, "as_index", False),
(Series, "sort", False),
(Series, "group_keys", False),
(Series, "squeeze", True),
(Series, "observed", True),
(Series, "dropna", False),
],
)
@pytest.mark.filterwarnings(
"ignore:The `squeeze` parameter is deprecated:FutureWarning"
)
def test_subsetting_columns_keeps_attrs(klass, attr, value):
# GH 9959 - When subsetting columns, don't drop attributes
df = DataFrame({"a": [1], "b": [2], "c": [3]})
if attr != "axis":
df = df.set_index("a")
expected = df.groupby("a", **{attr: value})
result = expected[["b"]] if klass is DataFrame else expected["b"]
assert getattr(result, attr) == getattr(expected, attr)
def test_subsetting_columns_axis_1():
# GH 37725
g = DataFrame({"A": [1], "B": [2], "C": [3]}).groupby([0, 0, 1], axis=1)
match = "Cannot subset columns when using axis=1"
with pytest.raises(ValueError, match=match):
g[["A", "B"]].sum()
@pytest.mark.parametrize("func", ["sum", "any", "shift"])
def test_groupby_column_index_name_lost(func):
# GH: 29764 groupby loses index sometimes
expected = Index(["a"], name="idx")
df = DataFrame([[1]], columns=expected)
df_grouped = df.groupby([1])
result = getattr(df_grouped, func)().columns
tm.assert_index_equal(result, expected)
def test_groupby_duplicate_columns():
# GH: 31735
df = DataFrame(
{"A": ["f", "e", "g", "h"], "B": ["a", "b", "c", "d"], "C": [1, 2, 3, 4]}
).astype(object)
df.columns = ["A", "B", "B"]
result = df.groupby([0, 0, 0, 0]).min()
expected = DataFrame([["e", "a", 1]], columns=["A", "B", "B"])
tm.assert_frame_equal(result, expected)
def test_groupby_series_with_tuple_name():
# GH 37755
ser = Series([1, 2, 3, 4], index=[1, 1, 2, 2], name=("a", "a"))
ser.index.name = ("b", "b")
result = ser.groupby(level=0).last()
expected = Series([2, 4], index=[1, 2], name=("a", "a"))
expected.index.name = ("b", "b")
tm.assert_series_equal(result, expected)
@pytest.mark.xfail(not IS64, reason="GH#38778: fail on 32-bit system")
@pytest.mark.parametrize(
"func, values", [("sum", [97.0, 98.0]), ("mean", [24.25, 24.5])]
)
def test_groupby_numerical_stability_sum_mean(func, values):
# GH#38778
data = [1e16, 1e16, 97, 98, -5e15, -5e15, -5e15, -5e15]
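    # Within each group the 1e16 and -5e15 terms cancel exactly, so sum/mean only
    # recover the small 97/98 contributions when the grouped reduction is numerically stable.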
df = DataFrame({"group": [1, 2] * 4, "a": data, "b": data})
result = getattr(df.groupby("group"), func)()
expected = DataFrame({"a": values, "b": values}, index=Index([1, 2], name="group"))
tm.assert_frame_equal(result, expected)
@pytest.mark.xfail(not IS64, reason="GH#38778: fail on 32-bit system")
def test_groupby_numerical_stability_cumsum():
# GH#38934
data = [1e16, 1e16, 97, 98, -5e15, -5e15, -5e15, -5e15]
df = DataFrame({"group": [1, 2] * 4, "a": data, "b": data})
result = df.groupby("group").cumsum()
exp_data = (
[1e16] * 2 + [1e16 + 96, 1e16 + 98] + [5e15 + 97, 5e15 + 98] + [97.0, 98.0]
)
expected = DataFrame({"a": exp_data, "b": exp_data})
tm.assert_frame_equal(result, expected, check_exact=True)
def test_groupby_mean_duplicate_index(rand_series_with_duplicate_datetimeindex):
dups = rand_series_with_duplicate_datetimeindex
result = dups.groupby(level=0).mean()
expected = dups.groupby(dups.index).mean()
tm.assert_series_equal(result, expected)
|
mapper0 = {"foo": 0, "bar": 0, "baz": 1, "qux": 1}
mapper1 = {"one": 0, "two": 0, "three": 1}
|
VORDInstance.py
|
class VORDInstance:
def __init__(self, video_id, video_path, frame_count, fps, width, height,
subject_objects, trajectories, relation_instances):
self.video_id = video_id
self.video_path = video_path
self.frame_count = frame_count
self.fps = fps
self.height = height
self.width = width
self.subject_objects = subject_objects
self.trajectories = trajectories
self.relation_instances = relation_instances
def __repr__(self):
return "VORD Instance: video_id=" + str(self.video_id)
def include_object(self, object_label):
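        # True if any annotated subject/object in this instance carries this category label (case-insensitive).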
for each_so in self.subject_objects:
if each_so['category'].lower() == object_label.lower():
return True
return False
def get_object_trajs(self, object_label):
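        # Gather every trajectory entry whose tid matches an object of this category; returns None if the label is absent.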
if self.include_object(object_label):
trajs_list = []
for each_so in self.subject_objects:
if object_label == each_so['category']:
obj_tid = each_so['tid']
for each_traj in self.trajectories:
for each_traj_obj in each_traj:
if obj_tid == each_traj_obj['tid']:
trajs_list.append(each_traj_obj)
return trajs_list
else:
return None
def get_object_relations_list(self):
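        # Return lists of all object categories and all relation predicates annotated in this instance.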
objects_list = []
relations_list = []
for each_so in self.subject_objects:
objects_list.append(each_so['category'])
for each_rel in self.relation_instances:
relations_list.append(each_rel['predicate'])
# print("Video " + str(self.video_id) + " has "
# + str(len(objects_list)) + " objects and " +
# str(len(relations_list)) + " relations.")
return objects_list, relations_list
def
|
(self):
categorys = {}
for each_os in self.subject_objects:
categorys[each_os['tid']] = each_os['category']
triplet_list = []
for each_pred in self.relation_instances:
each_trip = (categorys[each_pred['subject_tid']],
each_pred['predicate'],
categorys[each_pred['object_tid']])
triplet_list.append(each_trip)
return triplet_list
|
get_triplet_list
|
motor.go
|
// Package m702 provides r/w access to registers of M702 unidrive motors.
package m702
import (
"fmt"
"strconv"
"strings"
"time"
"github.com/goburrow/modbus"
)
const (
enable32bits = 0x4000 // enables the 32b mode of M702 modbus interface
nregs = 2 // number of 16b registers to read/write
)
// Parameter is a menu parameter in the M702 unidrive manual.
type Parameter struct {
Index [3]int
Title string
DefVal string
RW bool
Data [4]byte
}
// MBReg returns the (32b) modbus register value corresponding to this parameter.
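// For example (illustrative), menu 1 index 21 maps to 0x4000 + 1*100 + 21 - 1 = 0x4078;
// the slot number is not part of the register value, it selects the modbus slave id instead.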
func (p *Parameter) MBReg() uint16 {
return uint16(p.Index[1]*100 + p.Index[2] - 1 + enable32bits)
}
func (p Parameter) String() string {
return fmt.Sprintf("%02d.%02d.%03d", p.Index[0], p.Index[1], p.Index[2])
}
// NewParameter creates a parameter from a [slot.]menu.index string.
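// Accepted forms are "menu.index" (e.g. "1.021") and "slot.menu.index" (e.g. "3.1.021"),
// with slot in 0-4, menu <= 162 and index < 100.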
func NewParameter(menu string) (Parameter, error)
|
// Motor represents a M702 unidrive motor.
type Motor struct {
Addr string
Timeout time.Duration
}
// New returns a new M702 motor.
func New(addr string) Motor {
return Motor{
Addr: addr,
Timeout: 5 * time.Second,
}
}
func (m *Motor) client(slave byte) *modbus.TCPClientHandler {
c := modbus.NewTCPClientHandler(m.Addr)
c.SlaveId = slave
c.Timeout = m.Timeout
return c
}
// ReadParam reads parameter p's value from the motor.
func (m *Motor) ReadParam(p *Parameter) error {
c := m.client(byte(p.Index[0]))
defer c.Close()
cli := modbus.NewClient(c)
o, err := cli.ReadHoldingRegisters(p.MBReg(), nregs)
if err != nil {
return err
}
copy(p.Data[:], o)
return err
}
// WriteParam writes parameter p's value to the motor.
func (m *Motor) WriteParam(p Parameter) error {
c := m.client(byte(p.Index[0]))
defer c.Close()
o, err := modbus.NewClient(c).WriteMultipleRegisters(p.MBReg(), nregs, p.Data[:])
if err != nil {
return err
}
if o[1] != nregs {
return fmt.Errorf(
"m702: invalid write at Pr-%v. expected %d, got %d",
p, nregs, o[1],
)
}
return err
}
|
{
var err error
var p Parameter
var (
slot = 0
m = 0
i = 0
)
toks := strings.Split(menu, ".")
itoks := make([]int, len(toks))
for j, tok := range toks {
v, err := strconv.Atoi(tok)
if err != nil {
return p, err
}
itoks[j] = v
}
switch len(itoks) {
case 2:
m = itoks[0]
i = itoks[1]
case 3:
slot = itoks[0]
m = itoks[1]
i = itoks[2]
default:
return p, fmt.Errorf(
"m702: invalid menu value (too many/too few dots) [pr=%s]",
menu,
)
}
if slot > 4 || slot < 0 {
return p, fmt.Errorf(
"m702: invalid slot value (%d) [pr=%s]",
slot,
menu,
)
}
if m > 162 {
return p, fmt.Errorf("m702: invalid menu value (%d>162) [pr=%s]", m, menu)
}
if i >= 100 {
return p, fmt.Errorf("m702: invalid index value (%d>=100) [pr=%s]", i, menu)
}
p.Index = [3]int{slot, m, i}
return p, err
}
|
meta_test.go
|
package utils
import (
"testing"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/apimachinery/pkg/version"
fakediscovery "k8s.io/client-go/discovery/fake"
ktesting "k8s.io/client-go/testing"
)
func TestParseVersion(t *testing.T) {
tests := []struct {
input version.Info
expected ServerVersion
error bool
}{
{
input: version.Info{Major: "1", Minor: "6"},
expected: ServerVersion{Major: 1, Minor: 6},
},
{
input: version.Info{Major: "1", Minor: "70"},
expected: ServerVersion{Major: 1, Minor: 70},
},
{
input: version.Info{Major: "1", Minor: "6x"},
error: true,
},
{
input: version.Info{Major: "1", Minor: "8+"},
expected: ServerVersion{Major: 1, Minor: 8},
},
{
input: version.Info{Major: "", Minor: "", GitVersion: "v1.8.0"},
expected: ServerVersion{Major: 1, Minor: 8},
},
{
input: version.Info{Major: "1", Minor: "", GitVersion: "v1.8.0"},
expected: ServerVersion{Major: 1, Minor: 8},
},
{
input: version.Info{Major: "", Minor: "8", GitVersion: "v1.8.0"},
expected: ServerVersion{Major: 1, Minor: 8},
},
{
input: version.Info{Major: "", Minor: "", GitVersion: "v1.8.8-test.0"},
expected: ServerVersion{Major: 1, Minor: 8},
},
{
input: version.Info{Major: "1", Minor: "8", GitVersion: "v1.9.0"},
expected: ServerVersion{Major: 1, Minor: 8},
},
{
input: version.Info{Major: "", Minor: "", GitVersion: "v1.a"},
error: true,
},
}
for _, test := range tests {
v, err := ParseVersion(&test.input)
if test.error {
if err == nil {
t.Errorf("test %s should have failed and did not", test.input)
}
continue
}
if err != nil {
t.Errorf("test %v failed: %v", test.input, err)
continue
}
if v != test.expected {
t.Errorf("Expected %v, got %v", test.expected, v)
}
}
}
func TestVersionCompare(t *testing.T) {
v := ServerVersion{Major: 2, Minor: 3}
tests := []struct {
major, minor, result int
}{
{major: 1, minor: 0, result: 1},
{major: 2, minor: 0, result: 1},
{major: 2, minor: 2, result: 1},
{major: 2, minor: 3, result: 0},
{major: 2, minor: 4, result: -1},
{major: 3, minor: 0, result: -1},
}
for _, test := range tests {
res := v.Compare(test.major, test.minor)
if res != test.result {
t.Errorf("%d.%d => Expected %d, got %d", test.major, test.minor, test.result, res)
}
}
}
func TestResourceNameFor(t *testing.T) {
obj := &unstructured.Unstructured{
Object: map[string]interface{}{
"apiVersion": "tests/v1alpha1",
"kind": "Test",
"metadata": map[string]interface{}{
"name": "myname",
"namespace": "mynamespace",
},
},
}
fake := &ktesting.Fake{
Resources: []*metav1.APIResourceList{
{
GroupVersion: "tests/v1alpha1",
APIResources: []metav1.APIResource{
{
Name: "tests",
Kind: "Test",
},
},
},
},
}
disco := &fakediscovery.FakeDiscovery{Fake: fake}
if n := ResourceNameFor(disco, obj); n != "tests" {
t.Errorf("Got resource name %q for %v", n, obj)
}
obj.SetKind("Unknown")
if n := ResourceNameFor(disco, obj); n != "unknown"
|
obj.SetGroupVersionKind(schema.GroupVersionKind{Group: "unknown", Version: "noversion", Kind: "SomeKind"})
if n := ResourceNameFor(disco, obj); n != "somekind" {
t.Errorf("Got resource name %q for %v", n, obj)
}
}
func TestFqName(t *testing.T) {
obj := &unstructured.Unstructured{
Object: map[string]interface{}{
"apiVersion": "tests/v1alpha1",
"kind": "Test",
"metadata": map[string]interface{}{
"name": "myname",
},
},
}
if n := FqName(obj); n != "myname" {
t.Errorf("Got %q for %v", n, obj)
}
obj.SetNamespace("mynamespace")
if n := FqName(obj); n != "mynamespace.myname" {
t.Errorf("Got %q for %v", n, obj)
}
}
|
{
t.Errorf("Got resource name %q for %v", n, obj)
}
|
rd_tim_conf.rs
|
#[doc = "Register `RD_TIM_CONF` reader"]
pub struct R(crate::R<RD_TIM_CONF_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<RD_TIM_CONF_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<RD_TIM_CONF_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<RD_TIM_CONF_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `RD_TIM_CONF` writer"]
pub struct W(crate::W<RD_TIM_CONF_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<RD_TIM_CONF_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<RD_TIM_CONF_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<RD_TIM_CONF_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `READ_INIT_NUM` reader - Configures the initial read time of eFuse."]
pub struct READ_INIT_NUM_R(crate::FieldReader<u8, u8>);
impl READ_INIT_NUM_R {
#[inline(always)]
pub(crate) fn new(bits: u8) -> Self {
READ_INIT_NUM_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for READ_INIT_NUM_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `READ_INIT_NUM` writer - Configures the initial read time of eFuse."]
pub struct READ_INIT_NUM_W<'a> {
w: &'a mut W,
}
impl<'a> READ_INIT_NUM_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn
|
(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0xff << 24)) | ((value as u32 & 0xff) << 24);
self.w
}
}
impl R {
#[doc = "Bits 24:31 - Configures the initial read time of eFuse."]
#[inline(always)]
pub fn read_init_num(&self) -> READ_INIT_NUM_R {
READ_INIT_NUM_R::new(((self.bits >> 24) & 0xff) as u8)
}
}
impl W {
#[doc = "Bits 24:31 - Configures the initial read time of eFuse."]
#[inline(always)]
pub fn read_init_num(&mut self) -> READ_INIT_NUM_W {
READ_INIT_NUM_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "Configures read timing parameters.\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rd_tim_conf](index.html) module"]
pub struct RD_TIM_CONF_SPEC;
impl crate::RegisterSpec for RD_TIM_CONF_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [rd_tim_conf::R](R) reader structure"]
impl crate::Readable for RD_TIM_CONF_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [rd_tim_conf::W](W) writer structure"]
impl crate::Writable for RD_TIM_CONF_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets RD_TIM_CONF to value 0x1200_0000"]
impl crate::Resettable for RD_TIM_CONF_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0x1200_0000
}
}
|
bits
|
labeled_ty.rs
|
//! Provides a wrapper around `rustc::ty::Ty` with a label attached to each type constructor.
//!
//! Labeled type data is manipulated by reference, the same as with `Ty`s, and the data is stored
//! in the same arena as the underlying `Ty`s.
use std::fmt;
use std::marker::PhantomData;
use arena::SyncDroplessArena;
use rustc::ty::{Ty, TypeVariants};
use type_map;
/// The actual data for a labeled type.
///
/// This struct shouldn't be constructed directly - instead, use `LabeledTyCtxt` methods to build
/// instances inside the tcx arena and return `LabeledTy` references.
///
/// Labeled types have to mimic the tree structure of the underlying `Ty`, so that each type
/// constructor in the tree can have its own label. But maintaining a custom copy of
/// `TypeVariants` would be annoying, so instead, we let labeled types form arbitrary trees, and
/// make the `LabeledTyCtxt` responsible for making those trees match the `Ty`'s structure.
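///
/// For example, labeling `*mut u32` produces a node for the raw-pointer constructor whose
/// `args` slice holds a single child node for `u32`, each node carrying its own label.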
#[derive(Clone, PartialEq, Eq)]
pub struct LabeledTyS<'tcx, L: 'tcx> {
/// The underlying type.
pub ty: Ty<'tcx>,
/// The arguments of this type constructor. The number and meaning of these arguments depends
/// on which type constructor this is (specifically, which `TypeVariants` variant is used for
/// `self.ty.sty`).
pub args: &'tcx [LabeledTy<'tcx, L>],
/// The label for the current type constructor.
pub label: L,
}
/// A labeled type. Like `rustc::ty::Ty`, this is a reference to some arena-allocated data.
pub type LabeledTy<'tcx, L> = &'tcx LabeledTyS<'tcx, L>;
impl<'tcx, L: fmt::Debug> fmt::Debug for LabeledTyS<'tcx, L> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}#{:?}{:?}", self.label, self.ty, self.args)
}
}
impl<'tcx, L> LabeledTyS<'tcx, L> {
pub fn for_each_label<F: FnMut(&'tcx L)>(&'tcx self, callback: &mut F) {
callback(&self.label);
for &arg in self.args {
arg.for_each_label(callback);
}
}
}
/// Context for constructing `LabeledTy`s.
pub struct LabeledTyCtxt<'tcx, L: 'tcx> {
arena: &'tcx SyncDroplessArena,
_marker: PhantomData<L>,
}
impl<'tcx, L: Clone> LabeledTyCtxt<'tcx, L> {
/// Build a new `LabeledTyCtxt`. The `arena` must be the same one used by the `TyCtxt` that
/// built the underlying `Ty`s to be labeled.
pub fn new(arena: &'tcx SyncDroplessArena) -> LabeledTyCtxt<'tcx, L> {
LabeledTyCtxt {
arena: arena,
_marker: PhantomData,
}
}
/// Manually construct a slice in the context's arena.
pub fn mk_slice(&self, ltys: &[LabeledTy<'tcx, L>]) -> &'tcx [LabeledTy<'tcx, L>] {
if ltys.len() == 0 {
return &[];
}
self.arena.alloc_slice(ltys)
}
/// Manually construct a labeled type. Note that this does not do any checks on `args`! The
/// caller is responsible for making sure the number of arguments matches `ty.sty`.
pub fn mk(&self, ty: Ty<'tcx>, args: &'tcx [LabeledTy<'tcx, L>], label: L) -> LabeledTy<'tcx, L> {
self.arena.alloc(LabeledTyS {
ty: ty,
args: args,
label: label,
})
}
/// Label a `Ty` using a callback. The callback runs at every type constructor to produce a
/// label for that node in the tree.
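    ///
    /// For instance, `ctxt.label(ty, &mut |_| ())` attaches the unit label `()` to every
    /// constructor in `ty` (an illustrative call; any `FnMut(Ty) -> L` closure works).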
pub fn label<F: FnMut(Ty<'tcx>) -> L>(&self, ty: Ty<'tcx>, f: &mut F) -> LabeledTy<'tcx, L> {
use rustc::ty::TypeVariants::*;
let label = f(ty);
match ty.sty {
// Types with no arguments
TyBool |
TyChar |
TyInt(_) |
TyUint(_) |
TyFloat(_) |
TyStr |
TyNever => self.mk(ty, &[], label),
// Types with arguments
TyAdt(_, substs) => {
let args = substs.types().map(|t| self.label(t, f)).collect::<Vec<_>>();
self.mk(ty, self.mk_slice(&args), label)
},
TyArray(elem, _) => {
let args = [self.label(elem, f)];
self.mk(ty, self.mk_slice(&args), label)
},
TySlice(elem) => {
let args = [self.label(elem, f)];
self.mk(ty, self.mk_slice(&args), label)
},
TyRawPtr(mty) => {
let args = [self.label(mty.ty, f)];
self.mk(ty, self.mk_slice(&args), label)
},
TyRef(_, mty, _) => {
let args = [self.label(mty, f)];
self.mk(ty, self.mk_slice(&args), label)
},
TyFnDef(_, substs) => {
let args = substs.types().map(|ty| self.label(ty, f)).collect::<Vec<_>>();
self.mk(ty, self.mk_slice(&args), label)
},
TyFnPtr(ref sig) => {
let args = sig.skip_binder().inputs_and_output.iter()
.map(|ty| self.label(ty, f)).collect::<Vec<_>>();
self.mk(ty, self.mk_slice(&args), label)
},
TyTuple(ref elems) => {
let args = elems.iter().map(|ty| self.label(ty, f)).collect::<Vec<_>>();
self.mk(ty, self.mk_slice(&args), label)
},
// Types that aren't actually supported by this code yet
TyDynamic(..) |
TyClosure(..) |
TyGenerator(..) |
TyGeneratorWitness(..) |
TyProjection(..) |
TyAnon(..) |
TyParam(..) |
TyInfer(..) |
TyForeign(..) |
TyError => self.mk(ty, &[], label),
}
}
/// Label multiple `Ty`s using a callback.
pub fn
|
<F>(&self,
tys: &[Ty<'tcx>],
f: &mut F) -> &'tcx [LabeledTy<'tcx, L>]
where F: FnMut(Ty<'tcx>) -> L {
self.mk_slice(&tys.iter().map(|ty| self.label(ty, f)).collect::<Vec<_>>())
}
/// Substitute in arguments for any type parameter references (`TyParam`) in a labeled type.
/// Panics if `lty` contains a reference to a type parameter that is past the end of `substs`
/// (usually this means the caller is providing the wrong list of type arguments as `substs`).
///
/// TODO: This produces a `LabeledTy` with the right structure, but doesn't actually do
/// substitution on the underlying `Ty`s! This means if you substitute `u32` for `T`, you can
/// end up with a `LabeledTy` whose `ty` is `S<T>`, but whose args are `[u32]`. By some
/// miracle, this hasn't broken anything yet, but we may need to fix it eventually.
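    ///
    /// As an illustrative example, substituting `[lu32]` (a labeled `u32`) into the labeled
    /// type for `Vec<T>` returns a tree whose `TyParam` child has been replaced by `lu32`.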
pub fn subst(&self,
lty: LabeledTy<'tcx, L>,
substs: &[LabeledTy<'tcx, L>]) -> LabeledTy<'tcx, L> {
match lty.ty.sty {
TypeVariants::TyParam(ref tp) => {
substs[tp.idx as usize]
},
_ => self.mk(lty.ty, self.subst_slice(lty.args, substs), lty.label.clone()),
}
}
/// Substitute arguments in multiple labeled types.
pub fn subst_slice(&self,
ltys: &[LabeledTy<'tcx, L>],
substs: &[LabeledTy<'tcx, L>]) -> &'tcx [LabeledTy<'tcx, L>] {
        self.mk_slice(&ltys.iter().map(|lty| self.subst(lty, substs)).collect::<Vec<_>>())
}
/// Run a callback to replace the labels on a type.
pub fn relabel<L2, F>(&self, lty: LabeledTy<'tcx, L2>, func: &mut F) -> LabeledTy<'tcx, L>
where F: FnMut(&L2) -> L {
let args = self.relabel_slice(lty.args, func);
        self.mk(lty.ty, args, func(&lty.label))
}
/// Replace the labels on several labeled types.
pub fn relabel_slice<L2, F>(&self,
ltys: &'tcx [LabeledTy<'tcx, L2>],
func: &mut F) -> &'tcx [LabeledTy<'tcx, L>]
where F: FnMut(&L2) -> L {
let ltys = ltys.iter().cloned().map(|lty| self.relabel(lty, func)).collect::<Vec<_>>();
        self.mk_slice(&ltys)
}
}
impl<'tcx, L: fmt::Debug> type_map::Type for LabeledTy<'tcx, L> {
fn sty(&self) -> &TypeVariants {
&self.ty.sty
}
fn num_args(&self) -> usize {
self.args.len()
}
fn arg(&self, idx: usize) -> Self {
self.args[idx]
}
}
|
label_slice
|
util.py
|
"""
Miscellaneous package utilities.
.. include:: ../include/links.rst
"""
from itertools import chain, combinations
from IPython import embed
import numpy
def all_subclasses(cls):
"""
Collect all the subclasses of the provided class.
The search follows the inheritance to the highest-level class. Intermediate
base classes are included in the returned set, but not the base class itself.
Thanks to:
https://stackoverflow.com/questions/3862310/how-to-find-all-the-subclasses-of-a-class-given-its-name
Args:
cls (object):
The base class
Returns:
:obj:`set`: The unique set of derived classes, including any
intermediate base classes in the inheritance thread.
"""
return set(cls.__subclasses__()).union(
[s for c in cls.__subclasses__() for s in all_subclasses(c)])
def string_table(tbl, delimeter='print', has_header=True):
"""
    Provided the array of data, format it with equally spaced columns
    and add a header (first row) and contents delimiter.
Args:
tbl (`numpy.ndarray`_):
Array of string representations of the data to print.
delimeter (:obj:`str`, optional):
            If the first row in the table contains the column headers (see
            ``has_header``), this sets the delimiter between the first table row and
the column data. Use ``'print'`` for a simple line of hyphens,
anything else results in an ``rst`` style table formatting.
has_header (:obj:`bool`, optional):
The first row in ``tbl`` contains the column headers.
Returns:
:obj:`str`: Single long string with the data table.
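    Examples:
        An illustrative sketch (example values are not from the original docs)
        with a 2x2 table and a header row::

            tbl = numpy.array([['name', 'value'], ['x', '1.0']])
            print(string_table(tbl))

        which prints::

            name value
            ----------
            x    1.0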
"""
nrows, ncols = tbl.shape
col_width = [numpy.amax([len(dij) for dij in dj]) for dj in tbl.T]
_nrows = nrows
start = 1
if delimeter != 'print':
_nrows += 2
start += 1
if has_header:
_nrows += 1
start += 1
|
row_string = ['']*_nrows
for i in range(start,nrows+start-1):
row_string[i] = ' '.join([tbl[1+i-start,j].ljust(col_width[j]) for j in range(ncols)])
if delimeter == 'print':
# Heading row
row_string[0] = ' '.join([tbl[0,j].ljust(col_width[j]) for j in range(ncols)])
# Delimiter
if has_header:
row_string[1] = '-'*len(row_string[0])
return '\n'.join(row_string)+'\n'
# For an rst table
row_string[0] = ' '.join([ '='*col_width[j] for j in range(ncols)])
row_string[1] = ' '.join([tbl[0,j].ljust(col_width[j]) for j in range(ncols)])
if has_header:
row_string[2] = row_string[0]
row_string[-1] = row_string[0]
return '\n'.join(row_string)+'\n'
def powerset(iterable, reverse=False):
""""
Construct an iterable that steps through all combinations of the
provided iterable.
This is pulled from the recipes provided by the itertools
documentation.
Examples:
Get all unique combinations of the list [1,2,3]:
>>> list(powerset([1,2,3]))
        [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]
Args:
iterable (iterable):
An iterable object
reverse (:obj:`bool`, optional):
Reverse the order (only roughly) of the iterable by placing
the longer sequences first.
Returns:
`itertools.chain`: Iterable object that returns the sequence of
combinations.
"""
rng = range(len(iterable)+1)[::-1] if reverse else range(len(iterable)+1)
return chain.from_iterable(combinations(iterable, r) for r in rng)
def polygon_winding_number(polygon, point):
"""
Determine the winding number of a 2D polygon about a point.
    The code does **not** check if the polygon is simple (no intersecting line
segments). Algorithm taken from Numerical Recipes Section 21.4.
Args:
polygon (`numpy.ndarray`_):
An Nx2 array containing the x,y coordinates of a polygon.
The points should be ordered either counter-clockwise or
clockwise.
point (`numpy.ndarray`_):
One or more points for the winding number calculation.
Must be either a 2-element array for a single (x,y) pair,
or an Nx2 array with N (x,y) points.
Returns:
:obj:`int`, `numpy.ndarray`_: The winding number of each point with
respect to the provided polygon. Points inside the polygon have winding
numbers of 1 or -1; see :func:`point_inside_polygon`.
Raises:
ValueError:
Raised if ``polygon`` is not 2D, if ``polygon`` does not have two
columns, or if the last axis of ``point`` does not have 2 and only 2
elements.
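    Examples:
        As a quick illustration, a point inside a counter-clockwise unit square has a
        winding number of 1::

            square = numpy.array([[0., 0.], [1., 0.], [1., 1.], [0., 1.]])
            polygon_winding_number(square, numpy.array([[0.5, 0.5]]))  # -> array([1])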
"""
# Check input shape is for 2D only
if len(polygon.shape) != 2:
raise ValueError('Polygon must be an Nx2 array.')
if polygon.shape[1] != 2:
raise ValueError('Polygon must be in two dimensions.')
_point = numpy.atleast_2d(point)
if _point.shape[1] != 2:
raise ValueError('Point must contain two elements.')
# Get the winding number
nvert = polygon.shape[0]
npnt = _point.shape[0]
dl = numpy.roll(polygon, 1, axis=0)[None,:,:] - _point[:,None,:]
    dr = polygon[None,:,:] - _point[:,None,:]
dx = dl[...,0]*dr[...,1] - dl[...,1]*dr[...,0]
indx_l = dl[...,1] > 0
indx_r = dr[...,1] > 0
wind = numpy.zeros((npnt, nvert), dtype=int)
wind[indx_l & numpy.logical_not(indx_r) & (dx < 0)] = -1
wind[numpy.logical_not(indx_l) & indx_r & (dx > 0)] = 1
return numpy.sum(wind, axis=1)[0] if point.ndim == 1 else numpy.sum(wind, axis=1)
def point_inside_polygon(polygon, point):
"""
Determine if one or more points is inside the provided polygon.
Primarily a wrapper for :func:`polygon_winding_number`, that
returns True for each point that is inside the polygon.
Args:
polygon (`numpy.ndarray`_):
An Nx2 array containing the x,y coordinates of a polygon.
The points should be ordered either counter-clockwise or
clockwise.
point (`numpy.ndarray`_):
One or more points for the winding number calculation.
Must be either a 2-element array for a single (x,y) pair,
or an Nx2 array with N (x,y) points.
Returns:
:obj:`bool`, `numpy.ndarray`: Boolean indicating whether or not each
point is within the polygon.
"""
return numpy.absolute(polygon_winding_number(polygon, point)) == 1
| |
mesh.rs
|
use context::DebugId;
use gfx::traits::FactoryExt;
use graphics::*;
use lyon;
use lyon::tessellation as t;
/// A builder for creating `Mesh`es.
///
/// This allows you to easily make one `Mesh` containing
/// many different complex pieces of geometry. They don't
/// have to be connected to each other, and will all be
/// drawn at once.
///
/// The following example shows how to build a mesh containing a line and a circle:
///
/// ```rust
/// # use ggez::*;
/// # use ggez::graphics::*;
/// # fn t(ctx: &mut Context) {
/// let mesh: Mesh = MeshBuilder::new()
/// .line(&[Point2::new(20.0, 20.0), Point2::new(40.0, 20.0)], 4.0)
/// .circle(DrawMode::Fill, Point2::new(60.0, 38.0), 40.0, 1.0)
/// .build(ctx)
/// .unwrap();
/// # }
/// ```
/// A more sophisticated example:
///
/// ```rust
/// use ggez::{Context, GameResult};
/// use ggez::graphics::{self, DrawMode, MeshBuilder, Point2};
///
/// fn draw_danger_signs(ctx: &mut Context) -> GameResult<()> {
/// // Initialize a builder instance.
/// let mesh = MeshBuilder::new()
/// // Add vertices for 3 lines (in an approximate equilateral triangle).
/// .line(
/// &[
/// Point2::new(0.0, 0.0),
/// Point2::new(-30.0, 52.0),
/// Point2::new(30.0, 52.0),
/// Point2::new(0.0, 0.0),
/// ],
/// 1.0,
/// )
/// // Add vertices for an exclamation mark!
/// .ellipse(DrawMode::Fill, Point2::new(0.0, 25.0), 2.0, 15.0, 2.0)
/// .circle(DrawMode::Fill, Point2::new(0.0, 45.0), 2.0, 2.0)
/// // Finalize then unwrap. Unwrapping via `?` operator either yields the final `Mesh`,
/// // or propagates the error (note return type).
/// .build(ctx)?;
/// // Draw 3 meshes in a line, 1st and 3rd tilted by 1 radian.
/// graphics::draw(ctx, &mesh, Point2::new(50.0, 50.0), -1.0).unwrap();
/// graphics::draw(ctx, &mesh, Point2::new(150.0, 50.0), 0.0).unwrap();
/// graphics::draw(ctx, &mesh, Point2::new(250.0, 50.0), 1.0).unwrap();
/// Ok(())
/// }
/// ```
#[derive(Debug, Clone)]
pub struct MeshBuilder {
buffer: t::geometry_builder::VertexBuffers<Vertex>,
}
impl Default for MeshBuilder {
fn default() -> Self {
Self {
buffer: t::VertexBuffers::new(),
}
}
}
impl MeshBuilder {
/// Create a new MeshBuilder.
pub fn new() -> Self {
Self::default()
}
/// Create a new mesh for a line of one or more connected segments.
pub fn line(&mut self, points: &[Point2], width: f32) -> &mut Self {
self.polyline(DrawMode::Line(width), points)
}
/// Create a new mesh for a circle.
///
/// For the meaning of the `tolerance` parameter, [see here](https://docs.rs/lyon_geom/0.9.0/lyon_geom/#flattening).
pub fn circle(
&mut self,
mode: DrawMode,
point: Point2,
radius: f32,
tolerance: f32,
) -> &mut Self {
{
let buffers = &mut self.buffer;
match mode {
DrawMode::Fill => {
// These builders have to be in separate match arms 'cause they're actually
// different types; one is GeometryBuilder<StrokeVertex> and the other is
// GeometryBuilder<FillVertex>
let builder = &mut t::BuffersBuilder::new(buffers, VertexBuilder);
let fill_options = t::FillOptions::default().with_tolerance(tolerance);
t::basic_shapes::fill_circle(
|
builder,
);
}
DrawMode::Line(line_width) => {
let builder = &mut t::BuffersBuilder::new(buffers, VertexBuilder);
let options = t::StrokeOptions::default()
.with_line_width(line_width)
.with_tolerance(tolerance);
t::basic_shapes::stroke_circle(
t::math::point(point.x, point.y),
radius,
&options,
builder,
);
}
};
}
self
}
/// Create a new mesh for an ellipse.
///
/// For the meaning of the `tolerance` parameter, [see here](https://docs.rs/lyon_geom/0.9.0/lyon_geom/#flattening).
pub fn ellipse(
&mut self,
mode: DrawMode,
point: Point2,
radius1: f32,
radius2: f32,
tolerance: f32,
) -> &mut Self {
{
let buffers = &mut self.buffer;
match mode {
DrawMode::Fill => {
let builder = &mut t::BuffersBuilder::new(buffers, VertexBuilder);
let fill_options = t::FillOptions::default().with_tolerance(tolerance);
t::basic_shapes::fill_ellipse(
t::math::point(point.x, point.y),
t::math::vector(radius1, radius2),
t::math::Angle { radians: 0.0 },
&fill_options,
builder,
);
}
DrawMode::Line(line_width) => {
let builder = &mut t::BuffersBuilder::new(buffers, VertexBuilder);
let options = t::StrokeOptions::default()
.with_line_width(line_width)
.with_tolerance(tolerance);
t::basic_shapes::stroke_ellipse(
t::math::point(point.x, point.y),
t::math::vector(radius1, radius2),
t::math::Angle { radians: 0.0 },
&options,
builder,
);
}
};
}
self
}
/// Create a new mesh for a series of connected lines.
pub fn polyline(&mut self, mode: DrawMode, points: &[Point2]) -> &mut Self {
{
assert!(points.len() > 1);
let buffers = &mut self.buffer;
let points = points
.into_iter()
.map(|ggezpoint| t::math::point(ggezpoint.x, ggezpoint.y));
match mode {
DrawMode::Fill => {
let builder = &mut t::BuffersBuilder::new(buffers, VertexBuilder);
let tessellator = &mut t::FillTessellator::new();
let options = t::FillOptions::default();
// TODO: Removing this expect would be rather nice.
t::basic_shapes::fill_polyline(points, tessellator, &options, builder)
.expect("Could not fill polyline?");
}
DrawMode::Line(width) => {
let builder = &mut t::BuffersBuilder::new(buffers, VertexBuilder);
let options = t::StrokeOptions::default().with_line_width(width);
t::basic_shapes::stroke_polyline(points, false, &options, builder);
}
};
}
self
}
/// Create a new mesh for a closed polygon
pub fn polygon(&mut self, mode: DrawMode, points: &[Point2]) -> &mut Self {
{
let buffers = &mut self.buffer;
let points = points
.into_iter()
.map(|ggezpoint| t::math::point(ggezpoint.x, ggezpoint.y));
match mode {
DrawMode::Fill => {
let builder = &mut t::BuffersBuilder::new(buffers, VertexBuilder);
let tessellator = &mut t::FillTessellator::new();
let options = t::FillOptions::default();
// TODO: Removing this expect would be rather nice.
t::basic_shapes::fill_polyline(points, tessellator, &options, builder)
.expect("Could not fill polygon?");
}
DrawMode::Line(width) => {
let builder = &mut t::BuffersBuilder::new(buffers, VertexBuilder);
let options = t::StrokeOptions::default().with_line_width(width);
t::basic_shapes::stroke_polyline(points, true, &options, builder);
}
};
}
self
}
/// Create a new `Mesh` from a raw list of triangles.
///
/// Currently does not support UV's or indices.
pub fn triangles(&mut self, triangles: &[Point2]) -> &mut Self {
{
assert_eq!(triangles.len() % 3, 0);
let tris = triangles
.iter()
.cloned()
.map(|p| {
// Gotta turn ggez Point2's into lyon FillVertex's
let np = lyon::math::point(p.x, p.y);
let nv = lyon::math::vector(p.x, p.y);
t::FillVertex {
position: np,
normal: nv,
}
})
// Can we remove this collect?
// Probably means collecting into chunks first, THEN
// converting point types, since we can't chunk an iterator,
// only a slice. Not sure that's an improvement.
.collect::<Vec<_>>();
let tris = tris.chunks(3);
let builder: &mut t::BuffersBuilder<_, _, _> =
&mut t::BuffersBuilder::new(&mut self.buffer, VertexBuilder);
use lyon::tessellation::GeometryBuilder;
builder.begin_geometry();
for tri in tris {
// Ideally this assert makes bounds-checks only happen once.
assert!(tri.len() == 3);
let fst = tri[0];
let snd = tri[1];
let thd = tri[2];
let i1 = builder.add_vertex(fst);
let i2 = builder.add_vertex(snd);
let i3 = builder.add_vertex(thd);
builder.add_triangle(i1, i2, i3);
}
builder.end_geometry();
}
self
}
    /// Takes the accumulated geometry and loads it into GPU memory,
/// creating a single `Mesh`.
pub fn build(&self, ctx: &mut Context) -> GameResult<Mesh> {
let (vbuf, slice) = ctx.gfx_context
.factory
.create_vertex_buffer_with_slice(&self.buffer.vertices[..], &self.buffer.indices[..]);
Ok(Mesh {
buffer: vbuf,
slice,
blend_mode: None,
debug_id: DebugId::get(ctx),
})
}
}
struct VertexBuilder;
impl t::VertexConstructor<t::FillVertex, Vertex> for VertexBuilder {
fn new_vertex(&mut self, vertex: t::FillVertex) -> Vertex {
Vertex {
pos: [vertex.position.x, vertex.position.y],
uv: [0.0, 0.0],
}
}
}
impl t::VertexConstructor<t::StrokeVertex, Vertex> for VertexBuilder {
fn new_vertex(&mut self, vertex: t::StrokeVertex) -> Vertex {
Vertex {
pos: [vertex.position.x, vertex.position.y],
uv: [0.0, 0.0],
}
}
}
/// 2D polygon mesh.
///
/// All of its creation methods are just shortcuts for doing the same operation
/// via a `MeshBuilder`.
#[derive(Debug, Clone, PartialEq)]
pub struct Mesh {
buffer: gfx::handle::Buffer<gfx_device_gl::Resources, Vertex>,
slice: gfx::Slice<gfx_device_gl::Resources>,
blend_mode: Option<BlendMode>,
debug_id: DebugId,
}
impl Mesh {
/// Create a new mesh for a line of one or more connected segments.
pub fn new_line(ctx: &mut Context, points: &[Point2], width: f32) -> GameResult<Mesh> {
let mut mb = MeshBuilder::new();
mb.polyline(DrawMode::Line(width), points);
mb.build(ctx)
}
/// Create a new mesh for a circle.
pub fn new_circle(
ctx: &mut Context,
mode: DrawMode,
point: Point2,
radius: f32,
tolerance: f32,
) -> GameResult<Mesh> {
let mut mb = MeshBuilder::new();
mb.circle(mode, point, radius, tolerance);
mb.build(ctx)
}
/// Create a new mesh for an ellipse.
pub fn new_ellipse(
ctx: &mut Context,
mode: DrawMode,
point: Point2,
radius1: f32,
radius2: f32,
tolerance: f32,
) -> GameResult<Mesh> {
let mut mb = MeshBuilder::new();
mb.ellipse(mode, point, radius1, radius2, tolerance);
mb.build(ctx)
}
    /// Create a new mesh for a series of connected lines.
pub fn new_polyline(ctx: &mut Context, mode: DrawMode, points: &[Point2]) -> GameResult<Mesh> {
let mut mb = MeshBuilder::new();
mb.polyline(mode, points);
mb.build(ctx)
}
    /// Create a new mesh for a closed polygon.
pub fn new_polygon(ctx: &mut Context, mode: DrawMode, points: &[Point2]) -> GameResult<Mesh> {
let mut mb = MeshBuilder::new();
mb.polygon(mode, points);
mb.build(ctx)
}
/// Create a new `Mesh` from a raw list of triangles.
pub fn from_triangles(ctx: &mut Context, triangles: &[Point2]) -> GameResult<Mesh> {
let mut mb = MeshBuilder::new();
mb.triangles(triangles);
mb.build(ctx)
}
}
impl Drawable for Mesh {
fn draw_ex(&self, ctx: &mut Context, param: DrawParam) -> GameResult<()> {
self.debug_id.assert(ctx);
let gfx = &mut ctx.gfx_context;
gfx.update_instance_properties(param)?;
gfx.data.vbuf = self.buffer.clone();
let texture = gfx.white_image.texture.clone();
let typed_thingy = super::GlBackendSpec::raw_to_typed_shader_resource(texture);
gfx.data.tex.0 = typed_thingy;
gfx.draw(Some(&self.slice))?;
Ok(())
}
fn set_blend_mode(&mut self, mode: Option<BlendMode>) {
self.blend_mode = mode;
}
fn get_blend_mode(&self) -> Option<BlendMode> {
self.blend_mode
}
}
|
t::math::point(point.x, point.y),
radius,
&fill_options,
|
ingressclass.go
|
/*
Copyright The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by client-gen. DO NOT EDIT.
package v1beta1
import (
"context"
json "encoding/json"
"fmt"
"time"
v1beta1 "k8s.io/api/networking/v1beta1"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
types "k8s.io/apimachinery/pkg/types"
watch "k8s.io/apimachinery/pkg/watch"
networkingv1beta1 "github.com/Angus-F/client-go/applyconfigurations/networking/v1beta1"
scheme "github.com/Angus-F/client-go/kubernetes/scheme"
rest "github.com/Angus-F/client-go/rest"
)
// IngressClassesGetter has a method to return a IngressClassInterface.
// A group's client should implement this interface.
type IngressClassesGetter interface {
IngressClasses() IngressClassInterface
}
// IngressClassInterface has methods to work with IngressClass resources.
type IngressClassInterface interface {
Create(ctx context.Context, ingressClass *v1beta1.IngressClass, opts v1.CreateOptions) (*v1beta1.IngressClass, error)
Update(ctx context.Context, ingressClass *v1beta1.IngressClass, opts v1.UpdateOptions) (*v1beta1.IngressClass, error)
Delete(ctx context.Context, name string, opts v1.DeleteOptions) error
DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error
Get(ctx context.Context, name string, opts v1.GetOptions) (*v1beta1.IngressClass, error)
List(ctx context.Context, opts v1.ListOptions) (*v1beta1.IngressClassList, error)
Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error)
Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.IngressClass, err error)
Apply(ctx context.Context, ingressClass *networkingv1beta1.IngressClassApplyConfiguration, opts v1.ApplyOptions) (result *v1beta1.IngressClass, err error)
IngressClassExpansion
}
// ingressClasses implements IngressClassInterface
type ingressClasses struct {
client rest.Interface
}
// newIngressClasses returns a IngressClasses
func newIngressClasses(c *NetworkingV1beta1Client) *ingressClasses
|
// Get takes name of the ingressClass, and returns the corresponding ingressClass object, and an error if there is any.
func (c *ingressClasses) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1beta1.IngressClass, err error) {
result = &v1beta1.IngressClass{}
err = c.client.Get().
Resource("ingressclasses").
Name(name).
VersionedParams(&options, scheme.ParameterCodec).
Do(ctx).
Into(result)
return
}
// List takes label and field selectors, and returns the list of IngressClasses that match those selectors.
func (c *ingressClasses) List(ctx context.Context, opts v1.ListOptions) (result *v1beta1.IngressClassList, err error) {
var timeout time.Duration
if opts.TimeoutSeconds != nil {
timeout = time.Duration(*opts.TimeoutSeconds) * time.Second
}
result = &v1beta1.IngressClassList{}
err = c.client.Get().
Resource("ingressclasses").
VersionedParams(&opts, scheme.ParameterCodec).
Timeout(timeout).
Do(ctx).
Into(result)
return
}
// Watch returns a watch.Interface that watches the requested ingressClasses.
func (c *ingressClasses) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) {
var timeout time.Duration
if opts.TimeoutSeconds != nil {
timeout = time.Duration(*opts.TimeoutSeconds) * time.Second
}
opts.Watch = true
return c.client.Get().
Resource("ingressclasses").
VersionedParams(&opts, scheme.ParameterCodec).
Timeout(timeout).
Watch(ctx)
}
// Create takes the representation of an ingressClass and creates it. Returns the server's representation of the ingressClass, and an error, if there is any.
func (c *ingressClasses) Create(ctx context.Context, ingressClass *v1beta1.IngressClass, opts v1.CreateOptions) (result *v1beta1.IngressClass, err error) {
result = &v1beta1.IngressClass{}
err = c.client.Post().
Resource("ingressclasses").
VersionedParams(&opts, scheme.ParameterCodec).
Body(ingressClass).
Do(ctx).
Into(result)
return
}
// Update takes the representation of an ingressClass and updates it. Returns the server's representation of the ingressClass, and an error, if there is any.
func (c *ingressClasses) Update(ctx context.Context, ingressClass *v1beta1.IngressClass, opts v1.UpdateOptions) (result *v1beta1.IngressClass, err error) {
result = &v1beta1.IngressClass{}
err = c.client.Put().
Resource("ingressclasses").
Name(ingressClass.Name).
VersionedParams(&opts, scheme.ParameterCodec).
Body(ingressClass).
Do(ctx).
Into(result)
return
}
// Delete takes name of the ingressClass and deletes it. Returns an error if one occurs.
func (c *ingressClasses) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error {
return c.client.Delete().
Resource("ingressclasses").
Name(name).
Body(&opts).
Do(ctx).
Error()
}
// DeleteCollection deletes a collection of objects.
func (c *ingressClasses) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {
var timeout time.Duration
if listOpts.TimeoutSeconds != nil {
timeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second
}
return c.client.Delete().
Resource("ingressclasses").
VersionedParams(&listOpts, scheme.ParameterCodec).
Timeout(timeout).
Body(&opts).
Do(ctx).
Error()
}
// Patch applies the patch and returns the patched ingressClass.
func (c *ingressClasses) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.IngressClass, err error) {
result = &v1beta1.IngressClass{}
err = c.client.Patch(pt).
Resource("ingressclasses").
Name(name).
SubResource(subresources...).
VersionedParams(&opts, scheme.ParameterCodec).
Body(data).
Do(ctx).
Into(result)
return
}
// Apply takes the given apply declarative configuration, applies it and returns the applied ingressClass.
func (c *ingressClasses) Apply(ctx context.Context, ingressClass *networkingv1beta1.IngressClassApplyConfiguration, opts v1.ApplyOptions) (result *v1beta1.IngressClass, err error) {
if ingressClass == nil {
return nil, fmt.Errorf("ingressClass provided to Apply must not be nil")
}
patchOpts := opts.ToPatchOptions()
data, err := json.Marshal(ingressClass)
if err != nil {
return nil, err
}
name := ingressClass.Name
if name == nil {
return nil, fmt.Errorf("ingressClass.Name must be provided to Apply")
}
result = &v1beta1.IngressClass{}
err = c.client.Patch(types.ApplyPatchType).
Resource("ingressclasses").
Name(*name).
VersionedParams(&patchOpts, scheme.ParameterCodec).
Body(data).
Do(ctx).
Into(result)
return
}
|
{
return &ingressClasses{
client: c.RESTClient(),
}
}
|
route.go
|
package route
import (
"github.com/gin-contrib/sessions"
"github.com/gin-gonic/gin"
"github.com/surplus-youyu/Youyu-se/controllers"
"github.com/surplus-youyu/Youyu-se/models"
"github.com/surplus-youyu/Youyu-se/utils"
)
func loginRequired() gin.HandlerFunc {
return func(c *gin.Context) {
session := sessions.Default(c)
email := session.Get("userEmail")
if email == nil {
c.Abort()
c.JSON(401, gin.H{
"status": false,
"msg": "you should login first",
})
return
}
user := models.GetUserByEmail(email.(string))[0]
c.Set("user", user)
c.Next()
}
}
func Route(r *gin.Engine)
|
{
api := r.Group("/api")
api.Use(utils.HandleError)
{
// auth api
api.PUT("/login", controllers.LoginHandler)
api.POST("/register", controllers.RegisterHandler)
// login middleware
api.Use(loginRequired())
// logout
api.PUT("/logout", controllers.LogoutHandler)
// tasks apis
api.GET("/tasks", controllers.GetTaskList) // all tasks
api.POST("/tasks", controllers.CreateTask) // create task
api.GET("/tasks/:task_id", controllers.GetTaskByID) // get task
api.GET("/tasks/:task_id/files/:filename", controllers.GetTaskFiles) // get task
api.PUT("/tasks/:task_id", controllers.FinishTask) // finish the task and close it, only can be accessed by owner
// assignments apis
api.GET("/assignments", controllers.GetAssignList) // get current user's assignments
api.POST("/assignments", controllers.AssignTask) // create assignment with task id
api.GET("/assignments/:assign_id", controllers.GetAssignmentByID) // get assignment detail, only can be accessed by assignee
api.PUT("/assignments/:assign_id", controllers.SubmitAssign) // submit assginment content
api.GET("/tasks/:task_id/assignments", controllers.GetAssignListByTaskID) // get assignments with task id, only can be accessed by owner
api.PUT("/tasks/:task_id/assignments/:assign_id", controllers.JudgeAssignment) // judge the assignment
api.GET("/tasks/:task_id/statistics", controllers.GetSurveyStatistics) // get survey statistics
// user apis
api.GET("/user", controllers.GetUserInfo)
api.PUT("/user", controllers.UpdateUserInfo)
api.GET("/user/:uid", controllers.GetUserInfoById) // fuck httprouter
api.GET("/user/:uid/avatar", controllers.GetAvatar)
api.POST("/user/:uid/avatar", controllers.UpdateAvatar)
}
}
|
|
instantiate.rs
|
extern crate alloc;
use alloc::rc::Rc;
use core::cell::RefCell;
use cranelift_codegen::settings;
use cranelift_codegen::settings::Configurable;
use more_asserts::assert_gt;
use std::collections::HashMap;
use std::path::PathBuf;
use wasmtime_jit::{instantiate, CompilationStrategy, Compiler, NullResolver};
const PATH_MODULE_RS2WASM_ADD_FUNC: &str = r"tests/wat/rs2wasm-add-func.wat";
/// Simple test reading a wasm-file and translating to binary representation.
#[test]
fn
|
() {
let path = PathBuf::from(PATH_MODULE_RS2WASM_ADD_FUNC);
let data = wat::parse_file(path).expect("expecting valid wat-file");
assert_gt!(data.len(), 0);
let mut flag_builder = settings::builder();
flag_builder.enable("enable_verifier").unwrap();
let isa_builder = cranelift_native::builder().unwrap_or_else(|_| {
panic!("host machine is not a supported target");
});
let isa = isa_builder.finish(settings::Flags::new(flag_builder));
let mut resolver = NullResolver {};
let mut compiler = Compiler::new(isa, CompilationStrategy::Auto);
let global_exports = Rc::new(RefCell::new(HashMap::new()));
let instance = instantiate(&mut compiler, &data, &mut resolver, global_exports, false);
assert!(instance.is_ok());
}
|
test_environ_translate
|
root.go
|
package commands
import (
"github.com/spf13/cobra"
"os"
"path/filepath"
)
var (
Verbose bool
HomeDir string
RootCmd = &cobra.Command{
Use: "server",
Short: "server",
Long: `GRPC server`,
Run: func(cmd *cobra.Command, args []string) {
cmd.Help()
},
}
)
//Initialize flags for all commands
func
|
(){
RootCmd.PersistentFlags().BoolVarP(&Verbose,"verbos","v",true,"verbose output")
RootCmd.PersistentFlags().StringVarP(&HomeDir,"home","",os.ExpandEnv(filepath.Join("$HOME","GrpcKeys")),"home directory for client key file")
}
|
init
|
quad.py
|
import math
import numpy as np
import os
from astropy.io import fits
import operator
import itertools
class ImgCat:
"""
Represent an individual image and its associated catalog, starlist, quads etc.
"""
def __init__(self, filepath, hdu=0, cat=None):
"""
:param filepath: Path to the FITS file, or alternatively just a string to identify the image.
:type filepath: string
:param cat: Catalog generated by SExtractor (if available -- if not, we'll make our own)
:type cat: asciidata catalog
:param hdu: The hdu containing the science data from which I should build the catalog. 0 is primary. If multihdu, 1 is usually science.
"""
self.filepath = filepath
(imgdir, filename) = os.path.split(filepath)
(common, ext) = os.path.splitext(filename)
self.name = common
self.hdu = hdu
self.cat = cat
self.starlist = []
self.mindist = 0.0
self.xlim = (0.0, 0.0) # Will be set using the catalog -- no need for the FITS image.
self.ylim = (0.0, 0.0)
self.quadlist = []
self.quadlevel = 0 # encodes what kind of quads have already been computed
def makestarlist(self, skipsaturated=False, n=200):
if skipsaturated:
maxflag = 3
else:
maxflag = 7
hdu = fits.open(self.filepath)
cats = hdu[2].data
self.starlist = sortstarlistbyflux(cats)[:n]
(xmin, xmax, ymin, ymax) = area(cats, border=0.01)
self.xlim = (xmin, xmax)
self.ylim = (ymin, ymax)
# Given this starlist, what is a good minimal distance for stars in quads?
self.mindist = min(min(xmax - xmin, ymax - ymin) / 10.0, 30.0)
def makemorequads(self, verbose=True):
"""
Add more quads, following the quadlevel; each successful call moves to the next level.
"""
#if not add:
# self.quadlist = []
if verbose:
print("Making more quads, from quadlevel %i ..." % self.quadlevel)
if self.quadlevel == 0:
self.quadlist.extend(makequads1(self.starlist, n=7, d=self.mindist, verbose=verbose))
elif self.quadlevel == 1:
self.quadlist.extend(makequads2(self.starlist, f=3, n=5, d=self.mindist, verbose=verbose))
elif self.quadlevel == 2:
self.quadlist.extend(makequads2(self.starlist, f=6, n=5, d=self.mindist, verbose=verbose))
elif self.quadlevel == 3:
self.quadlist.extend(makequads2(self.starlist, f=12, n=5, d=self.mindist, verbose=verbose))
elif self.quadlevel == 4:
self.quadlist.extend(makequads2(self.starlist, f=10, n=6, s=3, d=self.mindist, verbose=verbose))
else:
return False
self.quadlist = removeduplicates(self.quadlist, verbose=verbose)
self.quadlevel += 1
return True
class Quad:
"""
A geometric "hash", or asterism, as used in Astrometry.net :
http://adsabs.harvard.edu/cgi-bin/bib_query?arXiv:0910.2233
It is made out of 4 stars, and it is shift / scale / rotation invariant
"""
def __init__(self, fourstars):
"""
fourstars is a list of four stars
We set the following attributes:
self.hash
self.stars (in the order A, B, C, D)
"""
assert len(fourstars) == 4
tests = [(0,1), (0,2), (0,3), (1,2), (1,3), (2,3)]
other = [(2,3), (1,3), (1,2), (0,3), (0,2), (0,1)]
dists = np.array([np.linalg.norm(np.array([fourstars[i]['x'], fourstars[i]['y']]) - np.array([fourstars[j]['x'], fourstars[j]['y']])) for (i,j) in tests])
assert np.min(dists) > 1.0
maxindex = np.argmax(dists)
(Ai, Bi) = tests[maxindex] # Indexes of stars A and B
(Ci, Di) = other[maxindex] # Indexes of stars C and D
A = fourstars[Ai]
B = fourstars[Bi]
C = fourstars[Ci]
D = fourstars[Di]
# We look for the transform [x', y'] = [[a, -b], [b, a]] [x, y] + [c, d] that maps A to (0, 0) and B to (1, 1):
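# (Derivation, for clarity: requiring T(A) = (0, 0) and T(B) = (1, 1) with
# T(p) = [[a, -b], [b, a]] p + [c, d] gives a*x - b*y = 1 and b*x + a*y = 1
# for x = B.x - A.x and y = B.y - A.y, hence b = (x - y)/(x^2 + y^2) and
# a = (1 + b*y)/x; c and d then translate A onto the origin.)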
x = B['x'] - A['x']
y = B['y'] - A['y']
b = (x-y)/(x*x + y*y)
a = (1.0/x) * (1.0 + b*y)
c = b*A['y'] - a*A['x']
d = - (b*A['x'] + a*A['y'])
t = SimpleTransform((a, b, c, d))
# Test
#print(t.apply((A['x'], A['y'])))
#print(t.apply((B.x, B['y'])))
(xC, yC) = t.apply(x = C['x'], y = C['y'])
(xD, yD) = t.apply(x = D['x'], y = D['y'])
# Normal case
self.hash = (xC, yC, xD, yD)
# Break symmetries :
testa = xC > xD
testb = xC + xD > 1
if testa and not testb: # we switch C and D
#print("a")
self.hash = (xD, yD, xC, yC)
(C, D) = (D, C)
if testb and not testa: # We switch A and B
#print("b")
self.hash = (1.0-xD, 1.0-yD, 1.0-xC, 1.0-yC)
(A, B) = (B, A)
(C, D) = (D, C)
if testa and testb:
#print("a + b")
self.hash = (1.0-xC, 1.0-yC, 1.0-xD, 1.0-yD)
(A, B) = (B, A)
# Checks :
assert self.hash[0] <= self.hash[2]
assert self.hash[0] + self.hash[2] <= 1
self.stars = [A, B, C, D] # Order might be different from the fourstars !
def __str__(self):
return "Hash : %6.3f %6.3f %6.3f %6.3f / IDs : (%s, %s, %s, %s)" % (
self.hash[0], self.hash[1], self.hash[2], self.hash[3],
self.stars[0].name, self.stars[1].name, self.stars[2].name, self.stars[3].name)
class SimpleTransform:
"""
Represents an affine transformation consisting of rotation, isotropic scaling, and shift.
[x', y'] = [[a -b], [b a]] * [x, y] + [c d]
"""
def __init__(self, v = (1, 0, 0, 0)):
"""
v = (a, b, c, d)
"""
self.v = np.asarray(v)
def getscaling(self):
return math.sqrt(self.v[0]*self.v[0] + self.v[1]*self.v[1])
def
|
(self):
"""
The CCW rotation angle, in degrees
"""
return math.atan2(self.v[1], self.v[0]) * (180.0/math.pi)  # % 360.0
def __str__(self):
return "Rotation %+11.6f [deg], scale %8.6f" % (self.getrotation(), self.getscaling())
def inverse(self):
"""
Returns the inverse transform.
"""
# To represent affine transformations with matrices, we can use homogeneous coordinates.
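# The inverse of a rotation + isotropic scale + shift has the same
# [[a, -b], [b, a]] structure, so a and b can be read back from the first
# column of the inverted homogeneous matrix, and c and d from its last column.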
homo = np.array([
[self.v[0], -self.v[1], self.v[2]],
[self.v[1], self.v[0], self.v[3]],
[0.0, 0.0, 1.0]
])
inv = np.linalg.inv(homo)
#print(inv)
return SimpleTransform((inv[0,0], inv[1,0], inv[0,2], inv[1,2]))
def matrixform(self):
"""
Special output for scipy.ndimage.interpolation.affine_transform
Returns (matrix, offset)
"""
return (np.array([[self.v[0], -self.v[1]], [self.v[1], self.v[0]]]), self.v[2:4])
def apply(self, x, y):
"""
Applies the transform to a point (x, y)
"""
xn = self.v[0]*x -self.v[1]*y + self.v[2]
yn = self.v[1]*x +self.v[0]*y + self.v[3]
return (xn, yn)
def applystar(self, star):
transstar = star.copy()
(transstar.x, transstar.y) = self.apply(transstar.x, transstar.y)
return transstar
def applystarlist(self, starlist):
return [self.applystar(star) for star in starlist]
def sortstarlistbyflux(starlist):
"""
Sort the starlist by flux, highest flux first.
"""
sortedstarlist = sorted(starlist, key=operator.itemgetter('flux'))
sortedstarlist.reverse()
return sortedstarlist
def area(stars, border=0.01):
"""
Returns the area covered by the stars.
Border is relative to max-min
"""
if len(stars) == 0:
return np.array([0, 1, 0, 1])
if len(stars) == 1:
star = stars[0]
return np.array([star['x'] - 0.5, star['x'] + 0.5, star['y'] - 0.5, star['y'] + 0.5])
(xmin, xmax) = (np.min(stars['x']), np.max(stars['x']))
(ymin, ymax) = (np.min(stars['y']), np.max(stars['y']))
xw = xmax - xmin
yw = ymax - ymin
xmin = xmin - border*xw
xmax = xmax + border*xw
ymin = ymin - border*yw
ymax = ymax + border*yw
return np.array([xmin, xmax, ymin, ymax])
def makequads1(starlist, n=7, s=0, d=50.0, verbose=True):
"""
First trivial quad maker.
Makes combis of the n brightest stars.
:param n: number of stars to consider (brightest ones).
:type n: int
:param s: how many of the brightest stars should I skip?
This feature is useful to avoid building quads with nearly saturated stars that are not
available in other exposures.
:type s: int
:param d: minimal distance between stars
:type d: float
"""
quadlist = []
sortedstars = sortstarlistbyflux(starlist)
for fourstars in itertools.combinations(sortedstars[s:s+n], 4):
if mindist(fourstars) > d:
quadlist.append(Quad(fourstars))
if verbose:
print("Made %4i quads from %4i stars (combi n=%i s=%i d=%.1f)" % (len(quadlist), len(starlist), n, s, d))
return quadlist
def mindist(cats):
"""
Returns the minimal pairwise distance between the 4 stars, used to decide whether they can make a good quad.
"""
tests = [(0,1), (0,2), (0,3), (1,2), (1,3), (2,3)]
dists = np.array([np.linalg.norm(np.array([cats[i]['x'], cats[i]['y']]) - np.array([cats[j]['x'], cats[j]['y']])) for (i,j) in tests])
return np.min(dists)
def removeduplicates(quadlist, verbose=True):
"""
Returns a quadlist without quads with identical hashes...
"""
# To avoid crash in lexsort if quadlist is too small :
if len(quadlist) < 2:
return quadlist
hasharray = np.array([q.hash for q in quadlist])
order = np.lexsort(hasharray.T)
hasharray = hasharray[order]
#diff = np.diff(hasharray, axis=0)
diff = np.fabs(np.diff(hasharray, axis=0))
#diff = np.sum(diff, axis=1)
ui = np.ones(len(hasharray), 'bool')
ui[1:] = (diff >= 0.000001).any(axis=1)
#print(hasharray[ui==False])
if verbose:
print("Removing %i/%i duplicates" % (len(quadlist) - np.sum(ui), len(quadlist)))
return [quad for (quad, u) in zip(quadlist, ui) if u]
|
getrotation
|
sql.rs
|
use crate::errors::ConnectorXError;
#[cfg(feature = "src_oracle")]
use crate::sources::oracle::OracleDialect;
use fehler::{throw, throws};
use log::{debug, trace, warn};
use sqlparser::ast::{
BinaryOperator, Expr, Function, FunctionArg, Ident, ObjectName, Query, Select, SelectItem,
SetExpr, Statement, TableAlias, TableFactor, TableWithJoins, Value,
};
use sqlparser::dialect::Dialect;
use sqlparser::parser::Parser;
#[cfg(feature = "src_oracle")]
use std::any::Any;
#[derive(Debug, Clone)]
pub enum CXQuery<Q = String> {
Naked(Q), // The query directly comes from the user
Wrapped(Q), // The user query is already wrapped in a subquery
}
impl<Q: std::fmt::Display> std::fmt::Display for CXQuery<Q> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
CXQuery::Naked(q) => write!(f, "{}", q),
CXQuery::Wrapped(q) => write!(f, "{}", q),
}
}
}
impl<Q: AsRef<str>> CXQuery<Q> {
pub fn as_str(&self) -> &str {
match self {
CXQuery::Naked(q) => q.as_ref(),
CXQuery::Wrapped(q) => q.as_ref(),
}
}
}
impl From<&str> for CXQuery {
fn from(s: &str) -> CXQuery<String> {
CXQuery::Naked(s.to_string())
}
}
impl From<&&str> for CXQuery {
fn from(s: &&str) -> CXQuery<String> {
CXQuery::Naked(s.to_string())
}
}
impl From<&String> for CXQuery {
fn from(s: &String) -> CXQuery {
CXQuery::Naked(s.clone())
}
}
impl From<&CXQuery> for CXQuery {
fn from(q: &CXQuery) -> CXQuery {
q.clone()
}
}
impl CXQuery<String> {
pub fn naked<Q: AsRef<str>>(q: Q) -> Self {
CXQuery::Naked(q.as_ref().to_string())
}
}
impl<Q: AsRef<str>> AsRef<str> for CXQuery<Q> {
fn as_ref(&self) -> &str {
match self {
CXQuery::Naked(q) => q.as_ref(),
CXQuery::Wrapped(q) => q.as_ref(),
}
}
}
impl<Q> CXQuery<Q> {
pub fn map<F, U>(&self, f: F) -> CXQuery<U>
where
F: Fn(&Q) -> U,
{
match self {
CXQuery::Naked(q) => CXQuery::Naked(f(q)),
CXQuery::Wrapped(q) => CXQuery::Wrapped(f(q)),
}
}
}
impl<Q, E> CXQuery<Result<Q, E>> {
pub fn result(self) -> Result<CXQuery<Q>, E> {
match self {
CXQuery::Naked(q) => q.map(CXQuery::Naked),
CXQuery::Wrapped(q) => q.map(CXQuery::Wrapped),
}
}
}
// wrap a query into a derived table
fn wrap_query(
query: &mut Query,
projection: Vec<SelectItem>,
selection: Option<Expr>,
tmp_tab_name: &str,
) -> Statement {
let with = query.with.clone();
query.with = None;
let alias = if tmp_tab_name.is_empty() {
None
} else {
Some(TableAlias {
name: Ident {
value: tmp_tab_name.into(),
quote_style: None,
},
columns: vec![],
})
};
Statement::Query(Box::new(Query {
with,
body: SetExpr::Select(Box::new(Select {
distinct: false,
top: None,
projection,
from: vec![TableWithJoins {
relation: TableFactor::Derived {
lateral: false,
subquery: Box::new(query.clone()),
alias,
},
joins: vec![],
}],
lateral_views: vec![],
selection,
group_by: vec![],
cluster_by: vec![],
distribute_by: vec![],
sort_by: vec![],
having: None,
})),
order_by: vec![],
limit: None,
offset: None,
fetch: None,
}))
}
trait StatementExt {
fn as_query(&self) -> Option<&Query>;
}
impl StatementExt for Statement {
fn as_query(&self) -> Option<&Query> {
match self {
Statement::Query(q) => Some(q),
_ => None,
}
}
}
trait QueryExt {
fn as_select_mut(&mut self) -> Option<&mut Select>;
}
impl QueryExt for Query {
fn as_select_mut(&mut self) -> Option<&mut Select> {
match self.body {
SetExpr::Select(ref mut select) => Some(select),
_ => None,
}
}
}
#[throws(ConnectorXError)]
pub fn count_query<T: Dialect>(sql: &CXQuery<String>, dialect: &T) -> CXQuery<String> {
trace!("Incoming query: {}", sql);
const COUNT_TMP_TAB_NAME: &str = "CXTMPTAB_COUNT";
#[allow(unused_mut)]
let mut table_alias = COUNT_TMP_TAB_NAME;
// HACK: Some dialects (e.g. Oracle) do not support "AS" for an alias
#[cfg(feature = "src_oracle")]
if dialect.type_id() == (OracleDialect {}.type_id()) {
table_alias = "";
}
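// The wrapped statement selects count(*) from the user query as a derived table,
// roughly `SELECT count(*) FROM (<query>) CXTMPTAB_COUNT`; when `table_alias`
// is empty (the Oracle case) no alias is attached at all.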
let tsql = match sql.map(|sql| Parser::parse_sql(dialect, sql)).result() {
Ok(ast) => {
let projection = vec![SelectItem::UnnamedExpr(Expr::Function(Function {
name: ObjectName(vec![Ident {
value: "count".to_string(),
quote_style: None,
}]),
args: vec![FunctionArg::Unnamed(Expr::Wildcard)],
over: None,
distinct: false,
}))];
let ast_count: Statement = match ast {
CXQuery::Naked(ast) => {
if ast.len() != 1 {
throw!(ConnectorXError::SqlQueryNotSupported(sql.to_string()));
}
let mut query = ast[0]
.as_query()
.ok_or_else(|| ConnectorXError::SqlQueryNotSupported(sql.to_string()))?
.clone();
query.order_by = vec![];
let select = query
.as_select_mut()
.ok_or_else(|| ConnectorXError::SqlQueryNotSupported(sql.to_string()))?;
select.sort_by = vec![];
wrap_query(&mut query, projection, None, table_alias)
}
CXQuery::Wrapped(ast) => {
if ast.len() != 1 {
throw!(ConnectorXError::SqlQueryNotSupported(sql.to_string()));
}
let mut query = ast[0]
.as_query()
.ok_or_else(|| ConnectorXError::SqlQueryNotSupported(sql.to_string()))?
.clone();
let select = query
.as_select_mut()
.ok_or_else(|| ConnectorXError::SqlQueryNotSupported(sql.to_string()))?;
select.projection = projection;
Statement::Query(Box::new(query))
}
};
format!("{}", ast_count)
}
Err(e) => {
warn!("parser error: {:?}, manually compose query string", e);
format!(
"SELECT COUNT(*) FROM ({}) as {}",
sql.as_str(),
COUNT_TMP_TAB_NAME
)
}
};
debug!("Transformed count query: {}", tsql);
CXQuery::Wrapped(tsql)
}
#[throws(ConnectorXError)]
pub fn limit1_query<T: Dialect>(sql: &CXQuery<String>, dialect: &T) -> CXQuery<String> {
trace!("Incoming query: {}", sql);
let sql = match Parser::parse_sql(dialect, sql.as_str()) {
Ok(mut ast) => {
if ast.len() != 1 {
throw!(ConnectorXError::SqlQueryNotSupported(sql.to_string()));
}
match &mut ast[0] {
Statement::Query(q) => {
q.limit = Some(Expr::Value(Value::Number("1".to_string(), false)));
}
_ => throw!(ConnectorXError::SqlQueryNotSupported(sql.to_string())),
};
format!("{}", ast[0])
}
Err(e) => {
warn!("parser error: {:?}, manually compose query string", e);
format!("{} LIMIT 1", sql.as_str())
}
};
debug!("Transformed limit 1 query: {}", sql);
CXQuery::Wrapped(sql)
}
#[throws(ConnectorXError)]
#[cfg(feature = "src_oracle")]
pub fn limit1_query_oracle(sql: &CXQuery<String>) -> CXQuery<String> {
trace!("Incoming oracle query: {}", sql);
let ast = Parser::parse_sql(&OracleDialect {}, sql.as_str())?;
if ast.len() != 1 {
throw!(ConnectorXError::SqlQueryNotSupported(sql.to_string()));
}
let ast_part: Statement;
let mut query = ast[0]
.as_query()
.ok_or_else(|| ConnectorXError::SqlQueryNotSupported(sql.to_string()))?
.clone();
let selection = Expr::BinaryOp {
left: Box::new(Expr::CompoundIdentifier(vec![Ident {
value: "rownum".to_string(),
quote_style: None,
}])),
op: BinaryOperator::Eq,
right: Box::new(Expr::Value(Value::Number("1".to_string(), false))),
};
ast_part = wrap_query(&mut query, vec![SelectItem::Wildcard], Some(selection), "");
let tsql = format!("{}", ast_part);
debug!("Transformed limit 1 query: {}", tsql);
CXQuery::Wrapped(tsql)
}
#[throws(ConnectorXError)]
pub fn single_col_partition_query<T: Dialect>(
sql: &str,
col: &str,
lower: i64,
upper: i64,
dialect: &T,
) -> String {
trace!("Incoming query: {}", sql);
const PART_TMP_TAB_NAME: &str = "CXTMPTAB_PART";
#[allow(unused_mut)]
let mut table_alias = PART_TMP_TAB_NAME;
#[allow(unused_mut)]
let mut cid = Box::new(Expr::CompoundIdentifier(vec![
Ident {
value: PART_TMP_TAB_NAME.to_string(),
quote_style: None,
},
Ident {
value: col.to_string(),
quote_style: None,
},
]));
// HACK: Some dialects (e.g. Oracle) do not support "AS" for an alias
#[cfg(feature = "src_oracle")]
if dialect.type_id() == (OracleDialect {}.type_id()) {
table_alias = "";
cid = Box::new(Expr::Identifier(Ident {
value: col.to_string(),
quote_style: None,
}));
}
let tsql = match Parser::parse_sql(dialect, sql) {
Ok(ast) => {
if ast.len() != 1 {
throw!(ConnectorXError::SqlQueryNotSupported(sql.to_string()));
}
let mut query = ast[0]
.as_query()
.ok_or_else(|| ConnectorXError::SqlQueryNotSupported(sql.to_string()))?
.clone();
let select = query
.as_select_mut()
.ok_or_else(|| ConnectorXError::SqlQueryNotSupported(sql.to_string()))?
.clone();
let ast_part: Statement;
let lb = Expr::BinaryOp {
left: Box::new(Expr::Value(Value::Number(lower.to_string(), false))),
op: BinaryOperator::LtEq,
right: cid.clone(),
};
let ub = Expr::BinaryOp {
left: cid,
op: BinaryOperator::Lt,
right: Box::new(Expr::Value(Value::Number(upper.to_string(), false))),
};
let selection = Expr::BinaryOp {
left: Box::new(lb),
op: BinaryOperator::And,
right: Box::new(ub),
};
if query.limit.is_none() && select.top.is_none() && !query.order_by.is_empty() {
// ORDER BY in a partition query does not make sense because partitions are unordered.
// Clear the ORDER BY because mssql does not support ORDER BY in a derived table,
// and an ORDER BY inside the derived table would make no difference anyway.
query.order_by.clear();
}
ast_part = wrap_query(
&mut query,
vec![SelectItem::Wildcard],
Some(selection),
table_alias,
);
format!("{}", ast_part)
}
Err(e) => {
warn!("parser error: {:?}, manually compose query string", e);
format!("SELECT * FROM ({}) AS CXTMPTAB_PART WHERE CXTMPTAB_PART.{} >= {} AND CXTMPTAB_PART.{} < {}", sql, col, lower, col, upper)
}
};
debug!("Transformed single column partition query: {}", tsql);
tsql
}
#[throws(ConnectorXError)]
pub fn get_partition_range_query<T: Dialect>(sql: &str, col: &str, dialect: &T) -> String {
trace!("Incoming query: {}", sql);
const RANGE_TMP_TAB_NAME: &str = "CXTMPTAB_RANGE";
#[allow(unused_mut)]
let mut table_alias = RANGE_TMP_TAB_NAME;
#[allow(unused_mut)]
let mut args = vec![FunctionArg::Unnamed(Expr::CompoundIdentifier(vec![
Ident {
value: RANGE_TMP_TAB_NAME.to_string(),
quote_style: None,
},
Ident {
value: col.to_string(),
quote_style: None,
},
]))];
// HACK: Some dialects (e.g. Oracle) do not support "AS" for an alias
#[cfg(feature = "src_oracle")]
if dialect.type_id() == (OracleDialect {}.type_id()) {
table_alias = "";
args = vec![FunctionArg::Unnamed(Expr::Identifier(Ident {
value: col.to_string(),
quote_style: None,
}))];
}
let tsql = match Parser::parse_sql(dialect, sql) {
Ok(ast) => {
if ast.len() != 1 {
throw!(ConnectorXError::SqlQueryNotSupported(sql.to_string()));
}
let mut query = ast[0]
.as_query()
.ok_or_else(|| ConnectorXError::SqlQueryNotSupported(sql.to_string()))?
.clone();
let ast_range: Statement;
query.order_by = vec![];
let projection = vec![
SelectItem::UnnamedExpr(Expr::Function(Function {
name: ObjectName(vec![Ident {
value: "min".to_string(),
quote_style: None,
}]),
args: args.clone(),
over: None,
distinct: false,
})),
SelectItem::UnnamedExpr(Expr::Function(Function {
|
name: ObjectName(vec![Ident {
value: "max".to_string(),
quote_style: None,
}]),
args,
over: None,
distinct: false,
})),
];
ast_range = wrap_query(&mut query, projection, None, table_alias);
format!("{}", ast_range)
}
Err(e) => {
warn!("parser error: {:?}, manually compose query string", e);
format!(
"SELECT MIN({}.{}) as min, MAX({}.{}) as max FROM ({}) AS {}",
RANGE_TMP_TAB_NAME, col, RANGE_TMP_TAB_NAME, col, sql, RANGE_TMP_TAB_NAME
)
}
};
debug!("Transformed partition range query: {}", tsql);
tsql
}
#[throws(ConnectorXError)]
pub fn get_partition_range_query_sep<T: Dialect>(
sql: &str,
col: &str,
dialect: &T,
) -> (String, String) {
trace!("Incoming query: {}", sql);
const RANGE_TMP_TAB_NAME: &str = "CXTMPTAB_RANGE";
let (sql_min, sql_max) = match Parser::parse_sql(dialect, sql) {
Ok(ast) => {
if ast.len() != 1 {
throw!(ConnectorXError::SqlQueryNotSupported(sql.to_string()));
}
let mut query = ast[0]
.as_query()
.ok_or_else(|| ConnectorXError::SqlQueryNotSupported(sql.to_string()))?
.clone();
let ast_range_min: Statement;
let ast_range_max: Statement;
query.order_by = vec![];
let min_proj = vec![SelectItem::UnnamedExpr(Expr::Function(Function {
name: ObjectName(vec![Ident {
value: "min".to_string(),
quote_style: None,
}]),
args: vec![FunctionArg::Unnamed(Expr::CompoundIdentifier(vec![
Ident {
value: RANGE_TMP_TAB_NAME.to_string(),
quote_style: None,
},
Ident {
value: col.to_string(),
quote_style: None,
},
]))],
over: None,
distinct: false,
}))];
let max_proj = vec![SelectItem::UnnamedExpr(Expr::Function(Function {
name: ObjectName(vec![Ident {
value: "max".to_string(),
quote_style: None,
}]),
args: vec![FunctionArg::Unnamed(Expr::CompoundIdentifier(vec![
Ident {
value: RANGE_TMP_TAB_NAME.into(),
quote_style: None,
},
Ident {
value: col.into(),
quote_style: None,
},
]))],
over: None,
distinct: false,
}))];
ast_range_min = wrap_query(&mut query.clone(), min_proj, None, RANGE_TMP_TAB_NAME);
ast_range_max = wrap_query(&mut query, max_proj, None, RANGE_TMP_TAB_NAME);
(format!("{}", ast_range_min), format!("{}", ast_range_max))
}
Err(e) => {
warn!("parser error: {:?}, manually compose query string", e);
(
format!(
"SELECT MIN({}.{}) as min FROM ({}) AS {}",
RANGE_TMP_TAB_NAME, col, sql, RANGE_TMP_TAB_NAME
),
format!(
"SELECT MAX({}.{}) as max FROM ({}) AS {}",
RANGE_TMP_TAB_NAME, col, sql, RANGE_TMP_TAB_NAME
),
)
}
};
debug!(
"Transformed separated partition range query: {}, {}",
sql_min, sql_max
);
(sql_min, sql_max)
}
| |
manage.py
|
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
|
if __name__ == '__main__':
main()
|
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'phogram.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
|
command.rs
|
use crate::chain_spec;
use crate::cli::{Cli, Subcommand};
use crate::service;
use sc_cli::{SubstrateCli, RuntimeVersion, Role, ChainSpec};
use sc_service::PartialComponents;
use crate::service::new_partial;
impl SubstrateCli for Cli {
fn impl_name() -> String {
"Prism Node".into()
}
fn impl_version() -> String {
env!("SUBSTRATE_CLI_IMPL_VERSION").into()
}
fn description() -> String {
env!("CARGO_PKG_DESCRIPTION").into()
}
fn author() -> String {
env!("CARGO_PKG_AUTHORS").into()
}
fn support_url() -> String {
"support.anonymous.an".into()
}
fn copyright_start_year() -> i32 {
2017
}
fn load_spec(&self, id: &str) -> Result<Box<dyn sc_service::ChainSpec>, String> {
Ok(match id {
"dev" => Box::new(chain_spec::development_config()?),
"" | "local" => Box::new(chain_spec::local_testnet_config()?),
path => Box::new(chain_spec::ChainSpec::from_json_file(
std::path::PathBuf::from(path),
)?),
})
}
fn native_runtime_version(_: &Box<dyn ChainSpec>) -> &'static RuntimeVersion {
&shadows_runtime::VERSION
}
}
/// Parse and run command line arguments
pub fn run() -> sc_cli::Result<()>
|
{
let cli = Cli::from_args();
match &cli.subcommand {
Some(Subcommand::BuildSpec(cmd)) => {
let runner = cli.create_runner(cmd)?;
runner.sync_run(|config| cmd.run(config.chain_spec, config.network))
},
Some(Subcommand::CheckBlock(cmd)) => {
let runner = cli.create_runner(cmd)?;
runner.async_run(|config| {
let PartialComponents { client, task_manager, import_queue, ..}
= new_partial(&config, &cli)?;
Ok((cmd.run(client, import_queue), task_manager))
})
},
Some(Subcommand::ExportBlocks(cmd)) => {
let runner = cli.create_runner(cmd)?;
runner.async_run(|config| {
let PartialComponents { client, task_manager, ..}
= new_partial(&config, &cli)?;
Ok((cmd.run(client, config.database), task_manager))
})
},
Some(Subcommand::ExportState(cmd)) => {
let runner = cli.create_runner(cmd)?;
runner.async_run(|config| {
let PartialComponents { client, task_manager, ..}
= new_partial(&config, &cli)?;
Ok((cmd.run(client, config.chain_spec), task_manager))
})
},
Some(Subcommand::ImportBlocks(cmd)) => {
let runner = cli.create_runner(cmd)?;
runner.async_run(|config| {
let PartialComponents { client, task_manager, import_queue, ..}
= new_partial(&config, &cli)?;
Ok((cmd.run(client, import_queue), task_manager))
})
},
Some(Subcommand::PurgeChain(cmd)) => {
let runner = cli.create_runner(cmd)?;
runner.sync_run(|config| cmd.run(config.database))
},
Some(Subcommand::Revert(cmd)) => {
let runner = cli.create_runner(cmd)?;
runner.async_run(|config| {
let PartialComponents { client, task_manager, backend, ..}
= new_partial(&config, &cli)?;
Ok((cmd.run(client, backend), task_manager))
})
},
None => {
let runner = cli.create_runner(&cli.run.base)?;
runner.run_node_until_exit(|config| async move {
match config.role {
Role::Light => service::new_light(config),
_ => service::new_full(config, &cli),
}.map_err(sc_cli::Error::Service)
})
}
}
}
|
|
git.py
|
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=invalid-name, too-few-public-methods
"""git template"""
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
import logging # pylint: disable=unused-import
import os
import posixpath
import re
import subprocess
import hpccm.base_object
class
|
(hpccm.base_object):
"""Template for working with git repositories"""
def __init__(self, **kwargs):
"""Initialize template"""
super(git, self).__init__(**kwargs)
self.git_opts = kwargs.get('opts', ['--depth=1'])
def __verify(self, repository, branch=None, commit=None, fatal=False):
"""Verify that the specific git reference exists in the remote
repository"""
if not branch and not commit: # pragma: no cover
# Should have already been caught before calling this function
logging.warning('Must specify one of branch or commit, '
'skipping verification')
return
command = 'git ls-remote {0} | grep {1}'.format(repository, commit)
ref = commit
if branch:
command = 'git ls-remote --exit-code --heads {0} {1}'.format(repository, branch)
ref = branch
with open(os.devnull, 'w') as DEVNULL:
p = subprocess.Popen(command, shell=True, stdout=DEVNULL,
stderr=DEVNULL)
o = p.communicate()
if p.returncode != 0:
if fatal:
raise RuntimeError('git ref "{}" does not exist'.format(ref))
else:
logging.warning('git ref "{}" does not exist'.format(ref))
return
def clone_step(self, branch=None, commit=None, directory='', path='/tmp',
repository=None, verify=None, lfs=False):
"""Clone a git repository"""
if not repository:
logging.warning('No git repository specified')
return ''
if branch and commit: # pragma: no cover
logging.warning('Both branch and commit specified, ' +
'ignoring branch and using commit...')
if not directory:
# Use the final entry in the repository as the directory,
# stripping off any '.git'. This is the default git
# behavior, but the directory may be explicitly needed
# below.
directory = posixpath.splitext(posixpath.basename(repository))[0]
# Copy so as not to modify the member variable
opts = list(self.git_opts)
# Commit has precedence over branch
if branch and not commit:
opts.append('--branch {}'.format(branch))
opt_string = ' '.join(opts)
if commit:
# Likely need the full repository history, so remove
# '--depth' if present
opt_string = re.sub(r'--depth=\d+\s*', '', opt_string).strip()
# Verify the commit / branch is valid
if verify:
fatal = False
if verify == 'fatal':
fatal = True
self.__verify(repository, branch=branch, commit=commit,
fatal=fatal)
# If lfs=True use `git lfs clone`
lfs_string = " "
if lfs:
lfs_string = " lfs "
# Ensure the path exists
# Would prefer to use 'git -C', but the ancient git included
# with CentOS7 does not support that option.
clone = ['mkdir -p {0}'.format(path),
'cd {0}'.format(path),
'git{0}clone {1} {2} {3}'.format(
lfs_string, opt_string, repository, directory).strip(),
'cd -']
if commit:
clone.extend(['cd {0}'.format(posixpath.join(path, directory)),
'git checkout {0}'.format(commit),
'cd -'])
return ' && '.join(clone)
|
git
|
DataManager.stories.tsx
|
import React from 'react';
import faker from 'faker';
import { includes, range, uniqBy } from 'lodash';
import { Button } from 'z-frontend-elements';
import { Checkbox, InputWithIcon } from 'z-frontend-forms';
import { Box, Flex } from 'zbase';
import { storiesOf } from '../.storybook/storyHelpers';
import DataManager, { DataManagerRenderProps } from './DataManager';
import { updateFilters } from './filterUtils';
import { updateSorter } from './sortUtils';
import Pager from './Pager';
const pageSize = 20;
interface EmployeeType {
id: number;
name: string;
company: string;
department: string;
}
const getEmployees: (number: number) => EmployeeType[] = num => {
faker.seed(123);
return range(num).map(id => ({
id,
name: faker.name.findName(),
company: faker.company.companyName(),
department: faker.commerce.department(),
}));
};
const employees = getEmployees(50);
const allDepartments = uniqBy(employees, 'department').map(e => e.department);
const initialNameFilter = { name: { stringContains: 'er' } };
const initialDeptFilter = updateFilters({}, 'matchAny', 'department', 'Automotive', true);
const ListOfEmployees = ({ arr }: any) => (
<ul>
{arr.map((a: any) => (
<li key={a.id}>
{a.name} (Dept: <i>{a.department}</i>)
</li>
))}
</ul>
);
const DepartmentCheckboxesFilter = ({ allDepts, filterConfig, onFilterChange }: any) => {
const selectedDepartments = (filterConfig.department || {}).matchAny;
return (
<Box>
Filter departments:{' '}
{allDepts.map((dept: any) => (
<Checkbox
key={dept}
label={dept}
checked={includes(selectedDepartments, dept)}
onChange={(e: any) =>
onFilterChange(updateFilters(filterConfig, 'matchAny', 'department', dept, e.target.checked))
}
/>
))}
</Box>
);
};
const NameSearchFilter = ({ filterConfig, onFilterChange }: any) => (
<Box w={1 / 3}>
Search names:{' '}
<InputWithIcon
rightIconName="search"
s="small"
value={(filterConfig.name || {}).stringContains || ''}
onChange={e => onFilterChange(updateFilters(filterConfig, 'stringContains', 'name', e.target.value, true))}
/>
|
</Box>
);
const SortButton = ({ field, sortConfig, onSortChange }: any) => (
<Button
m={2}
mode="primary"
disabled={(sortConfig[0] || { key: '' }).key === field}
onClick={e => onSortChange(updateSorter(sortConfig, field, true))}
>
Sort by {field}
</Button>
);
storiesOf('data-manager|DataManager', module)
.add('no filter', () => (
<DataManager
sourceData={employees}
render={(managerProps: DataManagerRenderProps<EmployeeType>) => (
<Box p={3}>
<h3>Full List ({employees.length} employees)</h3>
{/* This component actually renders the final data */}
<ListOfEmployees arr={managerProps.displayData} />
</Box>
)}
/>
))
.add('filtering (stringContains)', () => (
<DataManager
sourceData={employees}
initialFilter={initialNameFilter}
render={(managerProps: DataManagerRenderProps<EmployeeType>) => (
<Box p={3}>
<h3>Search (stringContains)</h3>
<NameSearchFilter
filterConfig={managerProps.filtering.config}
onFilterChange={managerProps.filtering.onChange}
/>
<b>Results:</b>
{/* This component actually renders the final data */}
<ListOfEmployees arr={managerProps.displayData} />
</Box>
)}
/>
))
.add('filtering (matchAny)', () => (
<DataManager
sourceData={employees}
initialFilter={initialDeptFilter}
render={(managerProps: DataManagerRenderProps<EmployeeType>) => (
<Box p={3}>
<h3>Checkboxes (matchAny)</h3>
<DepartmentCheckboxesFilter
allDepts={allDepartments}
filterConfig={managerProps.filtering.config}
onFilterChange={managerProps.filtering.onChange}
/>
<b>Results:</b>
{/* This component actually renders the final data */}
<ListOfEmployees arr={managerProps.displayData} />
</Box>
)}
/>
))
.add('sorting', () => (
<DataManager
sourceData={employees}
render={(managerProps: DataManagerRenderProps<EmployeeType>) => (
<Box p={3}>
<h3>Sorting a List by specific keys</h3>
<SortButton
field={'name'}
sortConfig={managerProps.sorting.config}
onSortChange={managerProps.sorting.onChange}
/>
<SortButton
field={'department'}
sortConfig={managerProps.sorting.config}
onSortChange={managerProps.sorting.onChange}
/>
{/* This component actually renders the final data */}
<ListOfEmployees arr={managerProps.displayData} />
</Box>
)}
/>
))
.add('paging', () => (
<DataManager
sourceData={employees}
initialPageSize={pageSize}
render={(managerProps: DataManagerRenderProps<EmployeeType>) => (
<Box p={3}>
<h3>Paged List (page size: {pageSize})</h3>
{/* This component actually renders the final data */}
<ListOfEmployees arr={managerProps.displayData} />
<Pager
pageSize={managerProps.paging.pageSize}
currentPage={managerProps.paging.currentPage}
totalItemsCount={managerProps.paging.inputData.length}
onPageChange={managerProps.paging.onPageChange}
/>
</Box>
)}
/>
))
.add('filtering, sorting and paging', () => (
<DataManager
sourceData={employees}
initialPageSize={pageSize}
render={(managerProps: DataManagerRenderProps<EmployeeType>) => (
<Box p={3}>
<h3>Filtering, Sorting, and Pagination</h3>
<Flex>
<Box w={1 / 6}>
<NameSearchFilter
filterConfig={managerProps.filtering.config}
onFilterChange={managerProps.filtering.onChange}
/>
<DepartmentCheckboxesFilter
allDepts={allDepartments}
filterConfig={managerProps.filtering.config}
onFilterChange={managerProps.filtering.onChange}
/>
</Box>
<Box w={2 / 3} p={3}>
<SortButton
field={'name'}
sortConfig={managerProps.sorting.config}
onSortChange={managerProps.sorting.onChange}
/>
<SortButton
field={'department'}
sortConfig={managerProps.sorting.config}
onSortChange={managerProps.sorting.onChange}
/>
{/* This component actually renders the final data */}
<ListOfEmployees arr={managerProps.displayData} />
<Pager
pageSize={managerProps.paging.pageSize}
currentPage={managerProps.paging.currentPage}
totalItemsCount={managerProps.paging.inputData.length}
onPageChange={managerProps.paging.onPageChange}
/>
</Box>
</Flex>
</Box>
)}
/>
));
| |
vae_gan.py
|
"""Training mechanism for VAE-GAN"""
import os
import time
import logging
import numpy as np
import torch
import torch.nn.functional as F
from spml import (
image_util,
loss_utils,
)
from . import (
misc_utils,
saved_model_manager,
)
_LG = logging.getLogger(__name__)
def _save_images(images, src_path, step, output_dir):
src_name = os.path.splitext(os.path.basename(src_path))[0]
save_path = os.path.join(
output_dir, 'images', src_name, 'step_%d.png' % step)
misc_utils.ensure_dir(save_path)
images = [img.detach().cpu().numpy() for img in images]
images = np.concatenate(images, axis=1)
image_util.save_image(images, save_path)
def _log_header():
fields = ' '.join(['%10s'] * 9) % (
'KLD', 'BETA', 'F_RECON',
'G_RECON', 'G_FAKE', 'D_REAL', 'D_RECON', 'D_FAKE', '[PIXEL]',
)
_LG.info('%5s %5s: %s', '', 'PHASE', fields)
_LOGGED = {'last': 0}
def _log_loss(loss, phase, progress=None):
if _LOGGED['last'] % 30 == 0:
_log_header()
_LOGGED['last'] += 1
header = '' if progress is None else '%3d %%' % progress
fields = ' '.join(['%10.2e'] * 9) % (
loss['kld'], loss['beta'], loss['feats_recon'],
loss['gen_recon'], loss['gen_fake'],
loss['disc_orig'], loss['disc_recon'], loss['disc_fake'],
loss['pixel'],
)
_LG.info('%5s %5s: %s', header, phase, fields)
def _get_latent_stats(z, z_std):
z = z.detach().cpu().numpy()
z_std = z_std.detach().cpu().numpy()
return {
'z_mean': np.mean(z),
'z_min': np.min(z),
'z_max': np.max(z),
'z_var': np.var(z),
'z_std_mean': np.mean(z_std),
'z_std_min': np.min(z_std),
'z_std_max': np.max(z_std),
'z_std_var': np.var(z_std),
}
class Trainer:
def __init__(
self, model, optimizers,
train_loader, test_loader,
device, output_dir,
initial_beta=10.0,
beta_step=0.1,
target_kld=0.1,
beta_momentum=0.1,
samples=None,
):
self.model = model.float().to(device)
self.train_loader = train_loader
self.test_loader = test_loader
self.optimizers = optimizers
self.device = device
self.output_dir = output_dir
self.beta = initial_beta
self.beta_step = beta_step
self.target_kld = target_kld
self.beta_momentum = beta_momentum
self.samples = samples
self.saved_model_manager = saved_model_manager.SavedModelManager()
fields = [
'PHASE', 'TIME', 'STEP', 'EPOCH', 'KLD', 'BETA', 'F_RECON',
'G_RECON', 'G_FAKE', 'D_REAL', 'D_RECON', 'D_FAKE', 'PIXEL',
'Z_MEAN', 'Z_MIN', 'Z_MAX', 'Z_VAR',
'Z_STD_MEAN', 'Z_STD_MIN', 'Z_STD_MAX', 'Z_STD_VAR',
]
logfile = open(os.path.join(output_dir, 'result.csv'), 'w')
self.writer = misc_utils.CSVWriter(fields, logfile)
self.step = 0
self.epoch = 0
self.latent_stats = loss_utils.MovingStats(beta_momentum)
def _write(self, phase, loss, stats):
self.writer.write(
PHASE=phase, STEP=self.step, EPOCH=self.epoch, TIME=time.time(),
KLD=loss['kld'], BETA=loss['beta'],
F_RECON=loss['feats_recon'],
G_RECON=loss['gen_recon'], G_FAKE=loss['gen_fake'],
D_REAL=loss['disc_orig'],
D_RECON=loss['disc_recon'], D_FAKE=loss['disc_fake'],
PIXEL=loss['pixel'],
Z_MEAN=stats['z_mean'], Z_VAR=stats['z_var'],
Z_MIN=stats['z_min'], Z_MAX=stats['z_max'],
Z_STD_MEAN=stats['z_std_mean'], Z_STD_VAR=stats['z_std_var'],
Z_STD_MIN=stats['z_std_min'], Z_STD_MAX=stats['z_std_max'],
)
def save(self):
filename = 'epoch_%s_step_%s.pt' % (self.epoch, self.step)
output = os.path.join(self.output_dir, 'checkpoints', filename)
_LG.info('Saving checkpoint at %s', output)
misc_utils.ensure_dir(output)
torch.save({
'model': self.model.state_dict(),
'optimizers': {
key: opt.state_dict()
for key, opt in self.optimizers.items()
},
'epoch': self.epoch,
'step': self.step,
}, output)
return output
def manage_saved(self, path, loss):
path = self.saved_model_manager.update(path, loss)
if path:
os.remove(path)
def load(self, checkpoint):
_LG.info('Loading checkpoint from %s', checkpoint)
data = torch.load(checkpoint, map_location=self.device)
self.model.load_state_dict(data['model'])
for key, opt in data['optimizers'].items():
self.optimizers[key].load_state_dict(opt)
self.epoch = data['epoch']
self.step = data['step']
def _forward_gan(self, orig, update=False):
# Update discriminator with original image
preds_orig, _ = self.model.discriminator(orig)
disc_loss_orig = loss_utils.bce(preds_orig, 1)
if update:
self.model.zero_grad()
disc_loss_orig.backward()
self.optimizers['discriminator'].step()
# Update discriminator with reconstructed image
recon, latent = self.model.vae(orig)
preds_recon, _ = self.model.discriminator(recon.detach())
disc_loss_recon = loss_utils.bce(preds_recon, 0)
if update:
self.model.zero_grad()
disc_loss_recon.backward()
self.optimizers['discriminator'].step()
# Update generator with reconstructed image
preds_recon, _ = self.model.discriminator(recon)
gen_loss_recon = loss_utils.bce(preds_recon, 1)
if update:
self.model.zero_grad()
gen_loss_recon.backward()
self.optimizers['decoder'].step()
# Update discriminator with fake image
sample = torch.randn_like(latent[0], requires_grad=True)
fake = self.model.vae.decoder(sample)
preds_fake, _ = self.model.discriminator(fake.detach())
disc_loss_fake = loss_utils.bce(preds_fake, 0)
if update:
self.model.zero_grad()
disc_loss_fake.backward()
self.optimizers['discriminator'].step()
# Update generator with fake image
preds_fake, _ = self.model.discriminator(fake)
gen_loss_fake = loss_utils.bce(preds_fake, 1)
if update:
self.model.zero_grad()
gen_loss_fake.backward()
self.optimizers['decoder'].step()
return {
'disc_orig': disc_loss_orig.item(),
'disc_recon': disc_loss_recon.item(),
'disc_fake': disc_loss_fake.item(),
'gen_recon': gen_loss_recon.item(),
'gen_fake': gen_loss_fake.item(),
}
def _forward_vae(self, orig, update=False):
# Update feature
|
def _get_pixel_loss(self, orig):
recon, _ = self.model.vae(orig)
return F.mse_loss(orig, recon)
def _forward(self, orig, update=False):
loss_gan = self._forward_gan(orig, update=update)
recon, loss_vae, stats = self._forward_vae(orig, update=update)
with torch.no_grad():
pixel_loss = self._get_pixel_loss(orig)
loss = {'pixel': pixel_loss.item()}
loss.update(loss_vae)
loss.update(loss_gan)
return recon, loss, stats
def train_batch(self, batch):
self.model.train()
orig = batch['image'].float().to(self.device)
_, loss, stats = self._forward(orig, update=True)
self._write('train', loss, stats)
return loss
def test(self):
with torch.no_grad():
return self._test()
def _test(self):
self.model.eval()
loss_tracker = misc_utils.StatsTracker()
stats_tracker = misc_utils.StatsTracker()
for i, batch in enumerate(self.test_loader):
orig, path = batch['image'].float().to(self.device), batch['path']
recon, loss, stats = self._forward(orig, update=False)
loss_tracker.update(loss)
stats_tracker.update(stats)
if i % 10 == 0:
_save_images(
(orig[0], recon[0]), path[0],
self.step, self.output_dir)
self._write('test', loss_tracker, stats_tracker)
_log_loss(loss_tracker, phase='Test')
return loss_tracker
def generate(self, samples=None):
samples = self.samples if samples is None else samples
with torch.no_grad():
self._generate(samples)
def _generate(self, samples):
self.model.eval()
recons = self.model.vae.decoder(samples)
for i, recon in enumerate(recons):
path = 'sample_%d.png' % i
_save_images([recon], path, self.step, self.output_dir)
def train_one_epoch(self, report_every=180, test_interval=1000):
last_report = 0
for i, batch in enumerate(self.train_loader):
loss = self.train_batch(batch)
self.step += 1
if time.time() - last_report > report_every:
progress = 100. * i / len(self.train_loader)
_log_loss(loss, 'Train', progress)
last_report = time.time()
if self.step % test_interval == 0:
self.generate()
loss = self.test()
path = self.save()
self.manage_saved(path, loss['pixel'])
self.epoch += 1
def __repr__(self):
opt = '\n'.join([
'%s: %s' % (key, val) for key, val in self.optimizers.items()
])
beta = '\n'.join([
'Beta: %s' % self.beta,
'Beta Step: %s' % self.beta_step,
'Target KLD: %s' % self.target_kld,
'Beta Momentum: %s' % self.beta_momentum,
])
return 'Epoch: %d\nStep: %d\nModel: %s\nOptimizers: %s\n%s\n' % (
self.epoch, self.step, self.model, opt, beta
)
|
recon, _ = self.model.vae(orig)
_, feats_orig = self.model.discriminator(orig)
_, feats_recon = self.model.discriminator(recon)
feats_loss = F.mse_loss(input=feats_recon, target=feats_orig)
if update:
self.model.zero_grad()
feats_loss.backward()
self.optimizers['encoder'].step()
self.optimizers['decoder'].step()
# KLD
sample, latent = self.model.vae.encoder(orig)
latent_stats = self.latent_stats(sample, update)
kld = torch.mean(loss_utils.kld_loss(*latent_stats))
if update:
beta_latent_loss = self.beta * kld
self.model.zero_grad()
beta_latent_loss.backward()
self.optimizers['encoder'].step()
# Adjust beta
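# Proportional control of beta: raise it when the KLD is above target_kld,
# lower it when below, and never let it fall under 1e-3.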
if update:
kld_error = kld.item() - self.target_kld
self.beta += self.beta_step * kld_error
self.beta = max(1e-3, self.beta)
loss = {
'kld': kld.item(),
'beta': self.beta,
'feats_recon': feats_loss.item(),
}
stats = _get_latent_stats(*latent)
return recon, loss, stats
|
serializers.py
|
from django.core.exceptions import ValidationError
from rest_framework.fields import CharField, ReadOnlyField
from rest_framework.relations import HyperlinkedRelatedField, SlugRelatedField
from rest_framework.serializers import (
HyperlinkedModelSerializer,
ModelSerializer,
SerializerMethodField,
)
from grandchallenge.api.swagger import swagger_schema_fields_for_charfield
from grandchallenge.cases.models import Image
from grandchallenge.reader_studies.models import (
ANSWER_TYPE_SCHEMA,
Answer,
CategoricalOption,
Question,
ReaderStudy,
)
from grandchallenge.reader_studies.tasks import add_scores
class CategoricalOptionSerializer(ModelSerializer):
class Meta:
model = CategoricalOption
fields = ("id", "title", "default")
class QuestionSerializer(HyperlinkedModelSerializer):
answer_type = CharField(source="get_answer_type_display")
reader_study = HyperlinkedRelatedField(
view_name="api:reader-study-detail", read_only=True
)
form_direction = CharField(source="get_direction_display")
image_port = CharField(source="get_image_port_display")
options = CategoricalOptionSerializer(many=True, read_only=True)
class Meta:
model = Question
fields = (
"answer_type",
"api_url",
"form_direction",
"help_text",
"image_port",
"pk",
"question_text",
"reader_study",
"required",
"options",
)
swagger_schema_fields = swagger_schema_fields_for_charfield(
answer_type=model._meta.get_field("answer_type"),
form_direction=model._meta.get_field(
"direction"
), # model.direction gets remapped
image_port=model._meta.get_field("image_port"),
)
class ReaderStudySerializer(HyperlinkedModelSerializer):
questions = QuestionSerializer(many=True, read_only=True)
hanging_list_images = SerializerMethodField()
help_text = ReadOnlyField()
case_text = ReadOnlyField(source="cleaned_case_text")
class Meta:
model = ReaderStudy
fields = (
"api_url",
"description",
"help_text",
"hanging_list_images",
"is_valid",
"pk",
"questions",
"title",
"is_educational",
"has_ground_truth",
"case_text",
"allow_answer_modification",
"allow_case_navigation",
"allow_show_all_annotations",
)
def get_hanging_list_images(self, obj: ReaderStudy):
"""Used by hanging_list_images serializer field."""
return obj.get_hanging_list_images_for_user(
user=self.context["request"].user
)
class AnswerSerializer(HyperlinkedModelSerializer):
creator = SlugRelatedField(read_only=True, slug_field="username")
question = HyperlinkedRelatedField(
view_name="api:reader-studies-question-detail",
queryset=Question.objects.all(),
)
images = HyperlinkedRelatedField(
many=True, queryset=Image.objects.all(), view_name="api:image-detail"
)
|
answer = attrs.get("answer")
if self.instance:
if (
not self.instance.question.reader_study.allow_answer_modification
):
raise ValidationError(
"This reader study does not allow answer modification."
)
if list(attrs.keys()) != ["answer"]:
raise ValidationError("Only the answer field can be modified.")
question = self.instance.question
images = self.instance.images.all()
creator = self.instance.creator
else:
question = attrs.get("question")
images = attrs.get("images")
creator = self.context.get("request").user
Answer.validate(
creator=creator,
question=question,
answer=answer,
images=images,
instance=self.instance,
)
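# For modifications of an existing answer, re-run scoring asynchronously over its images.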
if self.instance:
add_scores.apply_async(
kwargs={
"instance_pk": str(self.instance.pk),
"pk_set": list(
map(str, images.values_list("pk", flat=True))
),
}
)
return attrs if not self.instance else {"answer": answer}
class Meta:
model = Answer
fields = (
"answer",
"api_url",
"created",
"creator",
"images",
"pk",
"question",
"modified",
)
swagger_schema_fields = {
"properties": {"answer": {"title": "Answer", **ANSWER_TYPE_SCHEMA}}
}
|
def validate(self, attrs):
|
__init__.py
|
# coding: utf-8
"""
.. module: scieloopds
:synopsis: WSGI Application to provide SciELO Books in OPDS protocol.
.. moduleauthor:: Allison Vollmann <[email protected]>
Example configuration (additional parameters):
.. note::
[app:main]
...
mongo_uri = mongodb://localhost:27017/scieloopds
scielo_uri = http://books.scielo.org/api/v1/
auto_sync = True
auto_sync_interval = 60
items_per_page = 20
"""
import os
import sys
import logging
from urlparse import urlparse
from datetime import datetime, timedelta
import pymongo
from pyramid.config import Configurator
from pyramid.events import NewRequest
from pyramid.settings import asbool
from .sync import main as do_sync
from .utils import get_db_connection
APP_PATH = os.path.abspath(os.path.dirname(__file__))
DEFAULT_SETTINGS = [
('mongo_uri', 'OPDS_MONGO_URI', str,
'mongodb://localhost:27017/scieloopds'),
('scielo_uri', 'OPDS_SCIELO_URI', str,
'http://books.scielo.org/api/v1'),
('auto_sync', 'OPDS_AUTO_SYNC', asbool,
True),
('auto_sync_interval', 'OPDS_AUTO_SYNC_INTERVAL', int,
60*60*12),
('items_per_page', 'OPDS_ITEMS_PER_PAGE', int,
20),
]
def parse_settings(settings):
"""Analisa e retorna as configurações da app com base no arquivo .ini e env.
As variáveis de ambiente possuem precedência em relação aos valores
definidos no arquivo .ini.
"""
parsed = {}
cfg = list(DEFAULT_SETTINGS)
for name, envkey, convert, default in cfg:
value = os.environ.get(envkey, settings.get(name, default))
if convert is not None:
value = convert(value)
parsed[name] = value
return parsed
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
config = Configurator(settings=parse_settings(settings))
config.add_static_view('static', 'static', cache_max_age=3600)
config.add_route('root', '/opds/')
config.add_route('new', '/opds/new')
config.add_route('alpha_catalog', '/opds/alpha')
config.add_route('alpha_filter', '/opds/alpha/{id}')
config.add_route('publisher_catalog', '/opds/publisher')
config.add_route('publisher_filter', '/opds/publisher/{id}')
config.add_subscriber(add_mongo_db, NewRequest)
config.add_subscriber(start_sync, NewRequest)
config.scan(ignore='scieloopds.tests')
config.add_renderer('opds', factory='scieloopds.renderers.opds_factory')
return config.make_wsgi_app()
def ensure_indexes(db):
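# Index the fields used for ordering: newest-first on book.updated, ascending title_ascii on book, alpha and publisher.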
db.book.ensure_index([('updated', pymongo.DESCENDING)])
db.book.ensure_index([('title_ascii', pymongo.ASCENDING)])
db.alpha.ensure_index([('title_ascii', pymongo.ASCENDING)])
db.publisher.ensure_index([('title_ascii', pymongo.ASCENDING)])
def add_mongo_db(event):
settings = event.request.registry.settings
db = get_db_connection(settings)
ensure_indexes(db)
event.request.db = db
def start_sync(event):
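# When auto_sync is enabled, run do_sync if there is no catalog record yet or the last update is older than auto_sync_interval.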
settings = event.request.registry.settings
if settings['auto_sync']:
db = event.request.db
interval = settings['auto_sync_interval']
try:
update = db.catalog.find_one()
if update:
last_u
|
else:
do_sync(settings)
except pymongo.errors.AutoReconnect as e:
logging.getLogger(__name__).error('MongoDB: %s' % e.message)
|
pdate = update['updated']
next_update = last_update + timedelta(seconds=interval)
if next_update < datetime.now():
do_sync(settings)
|
general_engine.js
|
class
|
{
salt(exponential) {
let temp_result = Math.random() - exponential;
return temp_result - Math.floor(temp_result); // fractional part, in [0, 1)
}
}
module.exports = GeneralEngine;
|
GeneralEngine
|
json_deser.rs
|
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
pub fn parse_http_generic_error(
response: &http::Response<bytes::Bytes>,
) -> Result<aws_smithy_types::Error, aws_smithy_json::deserialize::Error> {
crate::json_errors::parse_generic_error(response.body(), response.headers())
}
pub fn deser_structure_crate_error_access_denied_exception_json_err(
value: &[u8],
mut builder: crate::error::access_denied_exception::Builder,
) -> Result<crate::error::access_denied_exception::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_internal_server_exception_json_err(
value: &[u8],
mut builder: crate::error::internal_server_exception::Builder,
) -> Result<crate::error::internal_server_exception::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_request_timeout_exception_json_err(
value: &[u8],
mut builder: crate::error::request_timeout_exception::Builder,
) -> Result<crate::error::request_timeout_exception::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_resource_not_found_exception_json_err(
value: &[u8],
mut builder: crate::error::resource_not_found_exception::Builder,
) -> Result<crate::error::resource_not_found_exception::Builder, aws_smithy_json::deserialize::Error>
{
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_service_quota_exceeded_exception_json_err(
value: &[u8],
mut builder: crate::error::service_quota_exceeded_exception::Builder,
) -> Result<
crate::error::service_quota_exceeded_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_service_unavailable_exception_json_err(
value: &[u8],
mut builder: crate::error::service_unavailable_exception::Builder,
) -> Result<crate::error::service_unavailable_exception::Builder, aws_smithy_json::deserialize::Error>
{
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_throttling_exception_json_err(
value: &[u8],
mut builder: crate::error::throttling_exception::Builder,
) -> Result<crate::error::throttling_exception::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_validation_exception_json_err(
value: &[u8],
mut builder: crate::error::validation_exception::Builder,
) -> Result<crate::error::validation_exception::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_batch_create_table_rows(
value: &[u8],
mut builder: crate::output::batch_create_table_rows_output::Builder,
) -> Result<
crate::output::batch_create_table_rows_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"createdRows" => {
builder = builder.set_created_rows(
crate::json_deser::deser_map_com_amazonaws_honeycode_created_rows_map(
tokens,
)?,
);
}
"failedBatchItems" => {
builder = builder.set_failed_batch_items(
crate::json_deser::deser_list_com_amazonaws_honeycode_failed_batch_items(tokens)?
);
}
"workbookCursor" => {
builder = builder.set_workbook_cursor(
aws_smithy_json::deserialize::token::expect_number_or_null(
tokens.next(),
)?
.map(|v| v.to_i64()),
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_batch_delete_table_rows(
value: &[u8],
mut builder: crate::output::batch_delete_table_rows_output::Builder,
) -> Result<
crate::output::batch_delete_table_rows_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"failedBatchItems" => {
builder = builder.set_failed_batch_items(
crate::json_deser::deser_list_com_amazonaws_honeycode_failed_batch_items(tokens)?
);
}
"workbookCursor" => {
builder = builder.set_workbook_cursor(
aws_smithy_json::deserialize::token::expect_number_or_null(
tokens.next(),
)?
.map(|v| v.to_i64()),
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_batch_update_table_rows(
value: &[u8],
mut builder: crate::output::batch_update_table_rows_output::Builder,
) -> Result<
crate::output::batch_update_table_rows_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"failedBatchItems" => {
builder = builder.set_failed_batch_items(
crate::json_deser::deser_list_com_amazonaws_honeycode_failed_batch_items(tokens)?
);
}
"workbookCursor" => {
builder = builder.set_workbook_cursor(
aws_smithy_json::deserialize::token::expect_number_or_null(
tokens.next(),
)?
.map(|v| v.to_i64()),
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_batch_upsert_table_rows(
value: &[u8],
mut builder: crate::output::batch_upsert_table_rows_output::Builder,
) -> Result<
crate::output::batch_upsert_table_rows_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"failedBatchItems" => {
builder = builder.set_failed_batch_items(
crate::json_deser::deser_list_com_amazonaws_honeycode_failed_batch_items(tokens)?
);
}
"rows" => {
builder = builder.set_rows(
crate::json_deser::deser_map_com_amazonaws_honeycode_upsert_rows_result_map(tokens)?
);
}
"workbookCursor" => {
builder = builder.set_workbook_cursor(
aws_smithy_json::deserialize::token::expect_number_or_null(
tokens.next(),
)?
.map(|v| v.to_i64()),
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_describe_table_data_import_job(
value: &[u8],
mut builder: crate::output::describe_table_data_import_job_output::Builder,
) -> Result<
crate::output::describe_table_data_import_job_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"jobMetadata" => {
builder = builder.set_job_metadata(
crate::json_deser::deser_structure_crate_model_table_data_import_job_metadata(tokens)?
);
}
"jobStatus" => {
builder = builder.set_job_status(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped().map(|u| {
crate::model::TableDataImportJobStatus::from(u.as_ref())
})
})
.transpose()?,
);
}
"message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_get_screen_data(
value: &[u8],
mut builder: crate::output::get_screen_data_output::Builder,
) -> Result<crate::output::get_screen_data_output::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"nextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"results" => {
builder = builder.set_results(
crate::json_deser::deser_map_com_amazonaws_honeycode_result_set_map(
tokens,
)?,
);
}
"workbookCursor" => {
builder = builder.set_workbook_cursor(
aws_smithy_json::deserialize::token::expect_number_or_null(
tokens.next(),
)?
.map(|v| v.to_i64()),
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_automation_execution_exception_json_err(
value: &[u8],
mut builder: crate::error::automation_execution_exception::Builder,
) -> Result<
crate::error::automation_execution_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_automation_execution_timeout_exception_json_err(
value: &[u8],
mut builder: crate::error::automation_execution_timeout_exception::Builder,
) -> Result<
crate::error::automation_execution_timeout_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_invoke_screen_automation(
value: &[u8],
mut builder: crate::output::invoke_screen_automation_output::Builder,
) -> Result<
crate::output::invoke_screen_automation_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"workbookCursor" => {
builder = builder.set_workbook_cursor(
aws_smithy_json::deserialize::token::expect_number_or_null(
tokens.next(),
)?
.map(|v| v.to_i64()),
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_list_table_columns(
value: &[u8],
mut builder: crate::output::list_table_columns_output::Builder,
) -> Result<crate::output::list_table_columns_output::Builder, aws_smithy_json::deserialize::Error>
{
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"nextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"tableColumns" => {
builder = builder.set_table_columns(
crate::json_deser::deser_list_com_amazonaws_honeycode_table_columns(
tokens,
)?,
);
}
"workbookCursor" => {
builder = builder.set_workbook_cursor(
aws_smithy_json::deserialize::token::expect_number_or_null(
tokens.next(),
)?
.map(|v| v.to_i64()),
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_list_table_rows(
value: &[u8],
mut builder: crate::output::list_table_rows_output::Builder,
) -> Result<crate::output::list_table_rows_output::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"columnIds" => {
builder = builder.set_column_ids(
crate::json_deser::deser_list_com_amazonaws_honeycode_resource_ids(
tokens,
)?,
);
}
"nextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"rowIdsNotFound" => {
builder = builder.set_row_ids_not_found(
crate::json_deser::deser_list_com_amazonaws_honeycode_row_id_list(
tokens,
)?,
);
}
"rows" => {
builder = builder.set_rows(
crate::json_deser::deser_list_com_amazonaws_honeycode_table_rows(
tokens,
)?,
);
}
"workbookCursor" => {
builder = builder.set_workbook_cursor(
aws_smithy_json::deserialize::token::expect_number_or_null(
tokens.next(),
)?
.map(|v| v.to_i64()),
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_list_tables(
value: &[u8],
mut builder: crate::output::list_tables_output::Builder,
) -> Result<crate::output::list_tables_output::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"nextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"tables" => {
builder = builder.set_tables(
crate::json_deser::deser_list_com_amazonaws_honeycode_tables(tokens)?,
);
}
"workbookCursor" => {
builder = builder.set_workbook_cursor(
aws_smithy_json::deserialize::token::expect_number_or_null(
tokens.next(),
)?
.map(|v| v.to_i64()),
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_query_table_rows(
value: &[u8],
mut builder: crate::output::query_table_rows_output::Builder,
) -> Result<crate::output::query_table_rows_output::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"columnIds" => {
builder = builder.set_column_ids(
crate::json_deser::deser_list_com_amazonaws_honeycode_resource_ids(
tokens,
)?,
);
}
"nextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"rows" => {
builder = builder.set_rows(
crate::json_deser::deser_list_com_amazonaws_honeycode_table_rows(
tokens,
)?,
);
}
"workbookCursor" => {
builder = builder.set_workbook_cursor(
aws_smithy_json::deserialize::token::expect_number_or_null(
tokens.next(),
)?
.map(|v| v.to_i64()),
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_start_table_data_import_job(
value: &[u8],
mut builder: crate::output::start_table_data_import_job_output::Builder,
) -> Result<
crate::output::start_table_data_import_job_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"jobId" => {
builder = builder.set_job_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"jobStatus" => {
builder = builder.set_job_status(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped().map(|u| {
crate::model::TableDataImportJobStatus::from(u.as_ref())
})
})
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn or_empty_doc(data: &[u8]) -> &[u8] {
if data.is_empty() {
b"{}"
} else {
data
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_map_com_amazonaws_honeycode_created_rows_map<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<
Option<std::collections::HashMap<std::string::String, std::string::String>>,
aws_smithy_json::deserialize::Error,
>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
let mut map = std::collections::HashMap::new();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
let key = key.to_unescaped().map(|u| u.into_owned())?;
let value = aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?;
if let Some(value) = value {
map.insert(key, value);
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(map))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_honeycode_failed_batch_items<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::FailedBatchItem>>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
crate::json_deser::deser_structure_crate_model_failed_batch_item(
tokens,
)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_map_com_amazonaws_honeycode_upsert_rows_result_map<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<
Option<std::collections::HashMap<std::string::String, crate::model::UpsertRowsResult>>,
aws_smithy_json::deserialize::Error,
>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
let mut map = std::collections::HashMap::new();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
let key = key.to_unescaped().map(|u| u.into_owned())?;
let value =
crate::json_deser::deser_structure_crate_model_upsert_rows_result(
tokens,
)?;
if let Some(value) = value {
map.insert(key, value);
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(map))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_table_data_import_job_metadata<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::TableDataImportJobMetadata>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::TableDataImportJobMetadata::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"submitter" => {
builder = builder.set_submitter(
crate::json_deser::deser_structure_crate_model_import_job_submitter(tokens)?
);
}
"submitTime" => {
builder = builder.set_submit_time(
aws_smithy_json::deserialize::token::expect_timestamp_or_null(
tokens.next(),
aws_smithy_types::instant::Format::EpochSeconds,
)?,
);
}
"importOptions" => {
builder = builder.set_import_options(
crate::json_deser::deser_structure_crate_model_import_options(
tokens,
)?,
);
}
"dataSource" => {
builder = builder.set_data_source(
crate::json_deser::deser_structure_crate_model_import_data_source(tokens)?
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_map_com_amazonaws_honeycode_result_set_map<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<
Option<std::collections::HashMap<std::string::String, crate::model::ResultSet>>,
aws_smithy_json::deserialize::Error,
>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
let mut map = std::collections::HashMap::new();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
let key = key.to_unescaped().map(|u| u.into_owned())?;
let value =
crate::json_deser::deser_structure_crate_model_result_set(tokens)?;
if let Some(value) = value {
map.insert(key, value);
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(map))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_honeycode_table_columns<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::TableColumn>>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
crate::json_deser::deser_structure_crate_model_table_column(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_honeycode_resource_ids<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<std::string::String>>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value = aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_honeycode_row_id_list<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<std::string::String>>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value = aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_honeycode_table_rows<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::TableRow>>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
crate::json_deser::deser_structure_crate_model_table_row(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_honeycode_tables<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::Table>>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
|
pub fn deser_structure_crate_model_failed_batch_item<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::FailedBatchItem>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::FailedBatchItem::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"id" => {
builder = builder.set_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"errorMessage" => {
builder = builder.set_error_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_upsert_rows_result<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::UpsertRowsResult>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::UpsertRowsResult::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"rowIds" => {
builder = builder.set_row_ids(
crate::json_deser::deser_list_com_amazonaws_honeycode_row_id_list(tokens)?
);
}
"upsertAction" => {
builder = builder.set_upsert_action(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped()
.map(|u| crate::model::UpsertAction::from(u.as_ref()))
})
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_import_job_submitter<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::ImportJobSubmitter>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::ImportJobSubmitter::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"email" => {
builder = builder.set_email(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"userArn" => {
builder = builder.set_user_arn(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_import_options<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::ImportOptions>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::ImportOptions::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"destinationOptions" => {
builder = builder.set_destination_options(
crate::json_deser::deser_structure_crate_model_destination_options(tokens)?
);
}
"delimitedTextOptions" => {
builder = builder.set_delimited_text_options(
crate::json_deser::deser_structure_crate_model_delimited_text_import_options(tokens)?
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_import_data_source<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::ImportDataSource>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::ImportDataSource::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"dataSourceConfig" => {
builder = builder.set_data_source_config(
crate::json_deser::deser_structure_crate_model_import_data_source_config(tokens)?
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_result_set<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::ResultSet>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::ResultSet::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"headers" => {
builder = builder.set_headers(
crate::json_deser::deser_list_com_amazonaws_honeycode_result_header(tokens)?
);
}
"rows" => {
builder = builder.set_rows(
crate::json_deser::deser_list_com_amazonaws_honeycode_result_rows(tokens)?
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_table_column<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::TableColumn>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::TableColumn::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"tableColumnId" => {
builder = builder.set_table_column_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"tableColumnName" => {
builder = builder.set_table_column_name(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"format" => {
builder = builder.set_format(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped()
.map(|u| crate::model::Format::from(u.as_ref()))
})
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_table_row<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::TableRow>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::TableRow::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"rowId" => {
builder = builder.set_row_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"cells" => {
builder = builder.set_cells(
crate::json_deser::deser_list_com_amazonaws_honeycode_cells(
tokens,
)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_table<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Table>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::Table::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"tableId" => {
builder = builder.set_table_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"tableName" => {
builder = builder.set_table_name(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_destination_options<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::DestinationOptions>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::DestinationOptions::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"columnMap" => {
builder = builder.set_column_map(
crate::json_deser::deser_map_com_amazonaws_honeycode_import_column_map(tokens)?
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_delimited_text_import_options<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::DelimitedTextImportOptions>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::DelimitedTextImportOptions::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"delimiter" => {
builder = builder.set_delimiter(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"hasHeaderRow" => {
builder = builder.set_has_header_row(
aws_smithy_json::deserialize::token::expect_bool_or_null(
tokens.next(),
)?,
);
}
"ignoreEmptyRows" => {
builder = builder.set_ignore_empty_rows(
aws_smithy_json::deserialize::token::expect_bool_or_null(
tokens.next(),
)?,
);
}
"dataCharacterEncoding" => {
builder = builder.set_data_character_encoding(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped().map(|u| {
crate::model::ImportDataCharacterEncoding::from(
u.as_ref(),
)
})
})
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_import_data_source_config<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::ImportDataSourceConfig>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::ImportDataSourceConfig::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"dataSourceUrl" => {
builder = builder.set_data_source_url(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_honeycode_result_header<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::ColumnMetadata>>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
crate::json_deser::deser_structure_crate_model_column_metadata(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_honeycode_result_rows<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::ResultRow>>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
crate::json_deser::deser_structure_crate_model_result_row(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_honeycode_cells<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::Cell>>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value = crate::json_deser::deser_structure_crate_model_cell(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_map_com_amazonaws_honeycode_import_column_map<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<
Option<
std::collections::HashMap<std::string::String, crate::model::SourceDataColumnProperties>,
>,
aws_smithy_json::deserialize::Error,
>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
let mut map = std::collections::HashMap::new();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
let key = key.to_unescaped().map(|u| u.into_owned())?;
let value =
crate::json_deser::deser_structure_crate_model_source_data_column_properties(tokens)?
;
if let Some(value) = value {
map.insert(key, value);
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(map))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_column_metadata<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::ColumnMetadata>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::ColumnMetadata::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"name" => {
builder = builder.set_name(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"format" => {
builder = builder.set_format(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped()
.map(|u| crate::model::Format::from(u.as_ref()))
})
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_result_row<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::ResultRow>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::ResultRow::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"rowId" => {
builder = builder.set_row_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"dataItems" => {
builder = builder.set_data_items(
crate::json_deser::deser_list_com_amazonaws_honeycode_data_items(tokens)?
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_cell<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Cell>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::Cell::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"formula" => {
builder = builder.set_formula(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"format" => {
builder = builder.set_format(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped()
.map(|u| crate::model::Format::from(u.as_ref()))
})
.transpose()?,
);
}
"rawValue" => {
builder = builder.set_raw_value(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"formattedValue" => {
builder = builder.set_formatted_value(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_source_data_column_properties<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::SourceDataColumnProperties>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::SourceDataColumnProperties::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"columnIndex" => {
builder = builder.set_column_index(
aws_smithy_json::deserialize::token::expect_number_or_null(
tokens.next(),
)?
.map(|v| v.to_i32()),
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_honeycode_data_items<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::DataItem>>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
crate::json_deser::deser_structure_crate_model_data_item(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
pub fn deser_structure_crate_model_data_item<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::DataItem>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::DataItem::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"overrideFormat" => {
builder = builder.set_override_format(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped()
.map(|u| crate::model::Format::from(u.as_ref()))
})
.transpose()?,
);
}
"rawValue" => {
builder = builder.set_raw_value(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"formattedValue" => {
builder = builder.set_formatted_value(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
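// A minimal usage sketch (not part of the generated code): the deserializers
// above all consume a peekable token stream. Assuming `json_token_iter` from
// `aws_smithy_json::deserialize` (the same helper the generated response
// parsers use), a single structure can be decoded from a raw payload like so.
#[cfg(test)]
mod deser_sketch {
    #[test]
    fn decodes_a_cell_from_raw_json() -> Result<(), aws_smithy_json::deserialize::Error> {
        let body = br#"{"formula":"=A1","rawValue":"42","formattedValue":"42"}"#;
        let mut tokens = aws_smithy_json::deserialize::json_token_iter(body).peekable();
        // `Ok(None)` is returned for a JSON null; a non-null object yields `Some(Cell)`.
        let cell = crate::json_deser::deser_structure_crate_model_cell(&mut tokens)?;
        assert!(cell.is_some());
        Ok(())
    }
}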
|
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value = crate::json_deser::deser_structure_crate_model_table(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
|
user.js
|
const mongoose = require('mongoose')
const Schema = mongoose.Schema
mongoose.Promise = Promise
const userSchema = new Schema({
firstName: String,
googleId: String,
|
})
const User = mongoose.model('User', userSchema)
module.exports = User
|
watchlist: String
|
settings.py
|
"""
Django settings for app project.
Generated by 'django-admin startproject' using Django 2.1.15.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'h*a+l$fv=kzviydb68m@knh=6wh17c(e(tkdyq&fn4ht5lhrqt'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
'core',
'user',
'recipe',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'HOST': os.environ.get('DB_HOST'),
'NAME': os.environ.get('DB_NAME'),
'USER': os.environ.get('DB_USER'),
'PASSWORD': os.environ.get('DB_PASS'),
}
}
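# Illustrative only (values are assumptions, not part of this project): the
# connection details above are read from the environment, e.g.
#   DB_HOST=localhost DB_NAME=app DB_USER=postgres DB_PASS=supersecret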
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
|
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
MEDIA_ROOT = '/vol/web/media'
STATIC_ROOT = '/vol/web/static'
AUTH_USER_MODEL = 'core.User'
| |
selection_widgets.py
|
"""Implements various elements to get user selection."""
from functools import partial
from kivy.animation import Animation
from kivy.factory import Factory
from kivy.lang import Builder
from kivy.properties import (
BooleanProperty,
ListProperty,
NumericProperty,
ObjectProperty,
OptionProperty,
StringProperty,
)
from kivy.uix.behaviors import ButtonBehavior
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.image import AsyncImage
from kivy.uix.modalview import ModalView
from kivy.uix.stacklayout import StackLayout
from kivymd.app import MDApp
from kivymd.theming import ThemableBehavior
from kivymd.uix.behaviors import CircularRippleBehavior, RectangularRippleBehavior
from kivymd.uix.card import MDCard
from kivymd.uix.imagelist import SmartTile
from .behaviors import (
CheckBehavior,
ChildrenFromDataBehavior,
LongPressBehavior,
ThemableColorChangeBehavior,
TranslationOnCheckBehavior,
)
class SeparatorWithHeading(FloatLayout):
r"""Two :class:`MDSeparator`\ s with a heading in between."""
heading = StringProperty("")
""":class:`~kivy.properties.StringProperty` with string used as heading."""
class CheckContainer(ChildrenFromDataBehavior):
"""Container for widgets with :class:`~custom_widgets.behaviors.CheckBehavior`."""
check_one = BooleanProperty(False)
""":class:`~kivy.properties.BooleanProperty` defaults to ``False``. If ``True`` only one child can be selected."""
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.child_bindings["current_state"] = self.conditional_uncheck
def conditional_uncheck(self, instance, value):
"""Uncheck other widgets if :attr:`check_one` is ``True``."""
if self.check_one:
for check_element in [
others for others in self.children if others != instance and value
]:
check_element.current_state = False
def get_checked(self, attribute_name=None):
"""
Return current selection.
Args:
attribute_name: Name of attribute to return. Defaults to ``None``.
Returns:
            * If ``attribute_name`` is None: List of selected children
* Else: List of attribute values
"""
checked_elements = [
element for element in self.children[::-1] if element.current_state
]
if attribute_name is None:
return checked_elements
return [
getattr(element, attribute_name) for element in checked_elements if element
]
class CheckCard(ThemableColorChangeBehavior, MDCard):
"""Selectable :~kivymd.uix.card.MDCard`. Select by click. Changes color on selection."""
text = StringProperty("test " * 15)
""":class:`~kivy.properties.StringProperty`."""
def on_press(self):
"""Change boolean value of :attr:`self.current_state`."""
self.current_state = ( # pylint: disable=attribute-defined-outside-init
not self.current_state
)
class CheckChip(
CircularRippleBehavior,
ButtonBehavior,
ThemableColorChangeBehavior,
BoxLayout,
):
"""Selectable Chip. Select by click. Change color on selection."""
icon = StringProperty("")
""":class:`~kivy.properties.StringProperty` defaults to ""."""
text = StringProperty("")
""":class:`~kivy.properties.StringProperty` defaults to ""."""
def on_press(self):
"""Change boolean value of :attr:`current_state`."""
self.current_state = ( # pylint: disable=attribute-defined-outside-init
not self.current_state
|
)
class TransChip(TranslationOnCheckBehavior, CheckChip):
"""Selectable Chip. Select by click. Change color and text on selection."""
class CheckChipContainer(CheckContainer, ThemableBehavior, StackLayout):
r"""Container for :class:`CheckChip`\ s. Use :attr:`child_dict` to populate."""
child_class_name = "CheckChip"
draw_box = BooleanProperty(False)
class CheckImageTile(CheckBehavior, SmartTile):
"""
Selectable :class:`~kivymd.uix.imagelist.SmartTile`.
    Select by click. Changes :attr:`opacity` and :attr:`border_width` on selection.
"""
border_width = NumericProperty(0.01)
""":class:`~kivy.properties.NumericProperty` describing boarder-width of image tile."""
def __init__(self, **kwargs):
self.state_dicts = {
True: {"opacity": 1, "border_width": 3},
False: {"opacity": 0.8, "border_width": 0.01},
}
super().__init__(**kwargs)
def on_press(self):
"""Change boolean value of current state on press."""
self.current_state = ( # pylint: disable=attribute-defined-outside-init
not self.current_state
)
class TransCard(LongPressBehavior, MDCard, RectangularRippleBehavior):
"""Displays :attr:`text_orig` and :attr:`text_trans`, separated by a line."""
text_orig = StringProperty("")
""":class:`~kivy.properties.StringProperty` first text."""
text_trans = StringProperty("")
""":class:`~kivy.properties.StringProperty` second text."""
orientation = OptionProperty("vertical", options=["vertical", "horizontal"])
""":class:`~kivy.properties.OptionProperty` possible values ["vertical", "horizontal"] defaults to "vertical"."""
class LongPressImage(LongPressBehavior, AsyncImage):
""":class:`~kivy.uix.image.AsyncImage` with additional "on_press" and "on_long_press" event."""
Factory.register("LongPressImage", LongPressImage)
Factory.register("TransCard", TransCard)
class MyCarousel(FloatLayout, ChildrenFromDataBehavior):
"""
Carousel that constructs contents from :attr:`data`.
On click, opens a modal with list of content.
"""
carousel = ObjectProperty()
""":class:`~kivy.properties.ObjectProperty`"""
modal_layout_name = StringProperty()
""":class:`~kivy.properties.StringProperty`"""
modal_data_cls_name = StringProperty()
""":class:`~kivy.properties.StringProperty`"""
modal = ModalView()
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.child_bindings = {
"height": self.update_height,
"on_press": self.open_menu,
}
self.on_data()
def update_num_children(self):
"""Add/remove children until correct number is reached."""
diff = len(self.data) - len(self.root_for_children.children) + 1
for _ in range(abs(diff)):
if diff > 0:
self.add_child()
else:
self.remove_child()
def on_data(self, *_):
"""Override :meth:`behaviors.ChildrenFromDataBehavior.on_data` with correct list of children.
The children are in ``carousel.slides`` as opposed to ``carousel.children``.
"""
if self.child_class_name:
self.update_num_children()
self.carousel.index = 1
for i, child_dict in enumerate(self.data, start=1):
for key, val in child_dict.items():
setattr(self.carousel.slides[i], key, val)
def remove_child(self):
"""Override :meth:`behaviors.ChildrenFromDataBehavior.remove_child` with correct list of children.
The children are in ``carousel.slides`` as opposed to ``carousel.children``.
"""
last_slide = self.carousel.slides[-1]
self.carousel.remove_widget(last_slide)
def before_add_child(self, child):
"""Bind :meth:`set_child_width` to change of :attr:`width`."""
self.bind(width=lambda *_: self.set_child_width(child))
def after_add_child(self, child):
"""Call :meth:`set_child_width` after adding child."""
self.set_child_width(child)
def set_child_width(self, child, *_):
"""Set width of child to :attr:`width` - width of left and right-icon."""
width = self.width - self.ids.left_icon.width - self.ids.right_icon.width
setattr(child, "width", width)
def update_height(self, *_):
"""Implement in sub class. Placeholder."""
def get_modal_content(self, size_hint=(1, None)):
"""Return root widget to display on the modal."""
def set_carousel_index(i, *_):
self.carousel.index = i
self.modal.dismiss()
data_dicts = [
{"size_hint": size_hint, "on_press": partial(set_carousel_index, 0)}
] + [
{**dict, "size_hint": size_hint, "on_press": partial(set_carousel_index, i)}
for i, dict in enumerate(self.data, start=1)
]
recycle_view_cls = Factory.get(self.modal_layout_name)
recycle_view = recycle_view_cls()
recycle_view.child_class_name = self.modal_data_cls_name
recycle_view.data = data_dicts
return recycle_view
def get_checked(self, attribute_name=None):
"""If ``attribute_name`` is ``None``, return currently selected widget, else return a property thereof."""
checked_elements = [self.carousel.current_slide]
if attribute_name is None:
return checked_elements
return [
getattr(element, attribute_name) for element in checked_elements if element
]
def open_menu(self, *_):
"""Open :class:`kivy.uix.modalview.ModalView` with content given by :meth:`get_modal_content`."""
self.modal = ModalView()
modal_content = self.get_modal_content()
self.modal.add_widget(modal_content)
self.modal.open()
class ImageCarousel(MyCarousel):
"""Carousel of images."""
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.child_bindings["on_error"] = lambda *_: self.dispatch("on_error", *_)
self.register_event_type("on_error")
self.on_data()
def get_modal_content(self, size_hint=(1, 1)):
"""Call :meth:`MyCarousel.get_modal_content` with ``size_hint=(1,1)``."""
return super().get_modal_content(size_hint=size_hint)
def on_error(self, *_):
"""Placeholder-function."""
class CardCarousel(MyCarousel):
"""
Carousel of :class:`TransCard`.
To use it with different objects, change :attr:`viewclass` and :attr:`modal_data_cls_name`.
"""
def __init__(self, **kwargs):
super().__init__(**kwargs)
del self.child_bindings["on_press"]
def update_height(self, *_):
"""Update height via animation, so that Widget has height of currently displayed card."""
if self.carousel.current_slide:
new_height = self.carousel.current_slide.height + 24
if self.height != new_height:
anim = Animation(height=new_height, duration=0.2)
anim.start(self)
class RecycleCarousel(FloatLayout):
"""
Wrapper class for a :class:`~kivy.uix.carousel.Carousel` that uses only 3 slides to update content dynamically.
The :attr:`index` is updated according to the change of the carousel index and each time one of the slides is
updated with data from :attr:`data`. The content of the slides is constructed as instances of :attr:`viewclass`.
"""
carousel = ObjectProperty()
""":class:`kivy.properties.ObjectProperty` defaults to ``None``."""
viewclass = StringProperty("TransCard")
""":class:`kivy.properties.StringProperty` defaults to ``"TransCard"``. Class name of the widgets that are added
to the carousel."""
data = ListProperty()
""":class:`kivy.properties.ListProperty` defaults to ``None``. List of dictionaries from which the content is
generated."""
slide_width = NumericProperty()
""":class:`kivy.properties.NumericProperty` defaults to ``None``. Width that the content of the slides should
have."""
dynamic_height = BooleanProperty(False)
""":class:`kivy.properties.BooleanProperty` defaults to ``False``. If ``True`` updates the height of the root
widget to the height of the object on the current slide + 24. Only possible if size_hint_y of the widget on the
slide is not set."""
index = NumericProperty(0)
""":class:`kivy.properties.NumericProperty` defaults to ``0``. Current (virtual) index."""
last_carousel_index = NumericProperty(0)
""":class:`kivy.properties.NumericProperty` defaults to ``0``. Last index that the :attr:`carousel` had. Used to
determine whether the user did slide right or left."""
current_slide = ObjectProperty()
""":class:`kivy.properties.ObjectProperty`. Reference to :attr:`carousel`.current_slide."""
modal_layout_name = StringProperty()
""":class:`kivy.properties.StringProperty` defaults to ``None``. Class name for root widget of :attr:`modal`."""
modal_data_cls_name = StringProperty()
""":class:`kivy.properties.StringProperty` defaults to ``None``. Class name for children of :attr:`modal`."""
modal = ObjectProperty(ModalView())
""":class:`kivy.properties.ObjectProperty` defaults to ``ModalView()``."""
default_modal_size_hint = ListProperty([1, None])
def update_height(self, *_):
"""Update height via animation, so that Widget has height of currently displayed card."""
if self.dynamic_height:
new_height = self.carousel.current_slide.height + 24
if self.height != new_height:
anim = Animation(height=new_height, duration=0.3)
anim.start(self)
def setup_modal(self):
"""Return root widget to display on the modal."""
self.modal = ModalView()
modal_root_cls = Factory.get(self.modal_layout_name)
modal_root = modal_root_cls()
self.modal.add_widget(modal_root)
def _modal_child_callback(self, i, *_):
self.set_index(i)
self.modal.dismiss()
def update_modal_content(self):
"""Update content of modal."""
data_dicts = [
{
**dict,
"size_hint": self.default_modal_size_hint,
"on_press": partial(self._modal_child_callback, i),
}
for i, dict in enumerate(self.data)
]
self.modal.children[0].child_class_name = self.modal_data_cls_name
self.modal.children[0].data = data_dicts
def get_checked(self, attribute_name=None):
"""If ``attribute_name`` is ``None``, return currently selected widget, else return a property thereof."""
checked_elements = [self.carousel.current_slide]
if attribute_name is None:
return checked_elements
return [
getattr(element, attribute_name) for element in checked_elements if element
]
def open_menu(self, *_):
"""Open :class:`kivy.uix.modalview.ModalView` with content given by :meth:`setup_modal`."""
if not self.modal.children:
self.setup_modal()
self.update_modal_content()
self.modal.open()
def on_data(self, *_):
"""Set up :attr:`carousel` by initializing 3 widgets, adding them and binding some Properties."""
self.carousel.clear_widgets()
if len(self.data) >= 3:
for i in [0, 1, -1]:
widget = Factory.get(self.viewclass)(**self.data[i])
self.carousel.add_widget(widget)
self.bind(slide_width=widget.setter("width"))
widget.bind(on_press=self.open_menu)
widget.width = self.slide_width
self.carousel.register_event_type("on_index")
self.carousel.bind(index=self.update_index)
self.carousel.bind(current_slide=self.update_height)
self.carousel.current_slide.bind(height=self.update_height)
print("RecylceCarousel needs at least 3 elements to be displayed correctly.")
def update_index(self, _, carousel_index):
"""Change :attr:`index` according to change in ``carousel_index`` and update one of the three slides."""
diff = carousel_index - self.last_carousel_index
diff = -1 if diff == 2 else 1 if diff == -2 else diff
self.last_carousel_index = carousel_index
self.index = (self.index + diff) % len(self.data)
self.update_slide(carousel_index + diff, self.index + diff)
def update_slide(self, carousel_index, index):
"""
Update slide with index ``carousel_index`` by content from :attr:`data` [index].
Modulo function applied to indices guarantees values to be in the correct range.
"""
carousel_index %= 3
index %= len(self.data)
for name, val in self.data[index].items():
setattr(self.carousel.slides[carousel_index], name, val)
def set_index(self, index):
"""Set :attr:`index` to ``index`` and updates carousel accordingly."""
self.index = index
self.update_height()
for i in [0, 1, -1]:
self.update_slide((self.last_carousel_index + i) % 3, self.index + i)
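    # Worked example of the recycling arithmetic above (illustrative comment,
    # not executed): with len(data) == 10, last_carousel_index == 2 and a swipe
    # that moves the carousel to index 0, diff == -2 and is normalised to +1,
    # so the virtual index advances by one and the slide at (0 + 1) % 3 == 1 is
    # refilled from data[(new virtual index + 1) % 10].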
# pylint: disable = W,C,R,I
if __name__ == "__main__":
CARD_CAROUSEL_STRING = (
"CardCarousel:\n"
' data: [{"text_orig":str(i)*50*i,"text_trans":"Trans"} for i in range(10)]'
)
RECYCLE_CAROUSEL_STRING = (
"RecycleCardCarousel:\n" # some comment
' data: [{"text_orig":str(i)*50*i,"text_trans":"Trans"} for i in range(10)]'
)
IMAGE_CAROUSEL_STRING = (
"ImageCarousel:\n"
' data: [{"source":"../assets/AnkiCardGen.png"} for _ in range(5)]'
)
class _TestApp(MDApp):
def build(self):
self.theme_cls.primary_palette = "Red" # "Purple", "Red"
self.theme_cls.theme_style = "Light" # "Purple", "Red"
return Builder.load_string(RECYCLE_CAROUSEL_STRING)
_TestApp().run()
| |
api.js
|
YUI.add("yuidoc-meta", function(Y) {
Y.YUIDoc = { meta: {
"classes": [
"CloseButton",
"CompositeMask",
"CornerPart",
"FadableItem",
"HidableItem",
"Mask.Arrow",
"Mask.Arrows",
"Mask.ControlVariables",
"Mask.DetailsPanel",
"Mask.StepDescription",
"Mask.StepDescriptionNextButton",
"Mask.Subject",
|
"Mask.WizardMenu",
"Part",
"Polling",
"SS",
"SSException",
"Screen",
"VisualItem"
],
"modules": [],
"allModules": []
} };
});
|
"Mask.SubjectMask",
"Mask.Wizard",
|
main.ts
|
/// <reference types="types-for-adobe/illustrator/2015.3"/>
/// <reference types="../../ISharedData"/>
declare const sharedDate: ISharedData
interface PathItems {
/**
* Create an elliptical path item.
* @param top The ellipse's bounds.
* @param left The ellipse's bounds.
* @param width The ellipse's bounds.
* @param height The height of the ellipse.
* @param reversed Is the ellipse path reversed?
* @param inscribed Is the ellipse path inscribed?
*/
ellipse(
top?: number,
left?: number,
width?: number,
height?: number,
reversed?: boolean,
inscribed?: boolean,
): PathItem
/**
* Used to create a regular polygon path item. Not for path item access.
* @param centerX
* @param centerY
* @param radius The radius of the polygon points.
* @param sides The number of sides on the polygon.
* @param reversed Is the polygon path reversed?
*/
polygon(
|
radius?: number,
sides?: number,
reversed?: boolean,
): PathItem
/**
* Used to create a rectangular path item. Not for path item access.
* @param top The top coordinate of the rectangle's bounds.
* @param left The left coordinate of the rectangle's bounds.
* @param width The width of the rectangle.
* @param height The height of the rectangle.
* @param reversed Is the rectangle path reversed?
*/
rectangle(
top: number,
left: number,
width: number,
height: number,
reversed?: boolean,
): PathItem
/**
* Used to create a rounded-corner rectangular path item. Not for path item access.
* @param top
* @param left
* @param width
* @param height
* @param horizontalRadius Horizontal corner radius.
* @param verticalRadius Vertical corner radius.
* @param reversed Is the rectangle path reversed?
*/
roundedRectangle(
top: number,
left: number,
width: number,
height: number,
horizontalRadius?: number,
verticalRadius?: number,
reversed?: boolean,
): PathItem
/**
* Used to create a star-shaped path item. Not for path item access.
* @param centerX
* @param centerY
* @param radius The outside radius of the star points.
* @param innerRadius The inside radius of the star points.
* @param points The number of points on the star.
* @param reversed Is the star path reversed?
*/
star(
centerX?: number,
centerY?: number,
radius?: number,
innerRadius?: number,
points?: number,
reversed?: boolean,
): PathItem
}
interface TextFont {
axisVector: any
}
// close all current opening documents
const arrayOfDocuments = app.documents
for(let document of arrayOfDocuments) {
if(document instanceof Document) {
document.close(SaveOptions.DONOTSAVECHANGES)
}
}
const newDoc = app.documents.add(
DocumentColorSpace.CMYK,
sharedDate.width,
sharedDate.height,
1
)
for(let i = 0; i < newDoc.artboards.length; i++) {
const artBoard: Artboard = newDoc.artboards[i]
const rect = newDoc.pathItems.rectangle( 0, 0, sharedDate.width, sharedDate.height);
artBoard.artboardRect = rect.geometricBounds
}
let textFrame = newDoc.textFrames.add()
textFrame.contents = sharedDate.textContent
textFrame.top = 0
textFrame.left = 0
const stringOf_TextFont_Object = getInfoOfObject(textFrame.textRange.characterAttributes.textFont)
let listOfFonts = ""
for(const font of app.textFonts) {
listOfFonts += `
${font.name}`
}
const fontToUsed = app.textFonts.getByName("SourceSerifVariable-Roman")
fontToUsed.axisVector = 200.10
textFrame.textRange.characterAttributes.textFont = fontToUsed
getInfoOfObject(fontToUsed)
// newDoc.close()
function getInfoOfObject(test: any) {
let stringToReturn = ""
for(const testKey in test) {
if(test.hasOwnProperty(testKey)) {
// @ts-ignore
stringToReturn += `${testKey}: ${test[testKey]}; `
}
}
return stringToReturn
}
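// Hedged sketch (commented out, not executed): the remaining PathItems
// constructors declared above follow the same pattern as the `rectangle` call
// used earlier; coordinates and sizes here are illustrative assumptions.
//   const hexagon = newDoc.pathItems.polygon(100, 100, 50, 6, false)
//   const star = newDoc.pathItems.star(250, 100, 50, 20, 5, false)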
|
centerX?: number,
centerY?: number,
|
ginmiddle.go
|
package example
import (
"sre-breaker/breaker"
|
)
func ginMiddleware() *gin.Engine {
engine := gin.Default()
engine.Use(breaker.GinBreakerHandler())
return engine
}
func main() {
r := ginMiddleware()
r.GET("/ping", func(c *gin.Context) {
c.JSON(200, gin.H{
"message": "pong",
})
})
r.Run()
}
|
"github.com/gin-gonic/gin"
|
login-form.component.ts
|
import { Component, OnDestroy, OnInit } from '@angular/core';
import { FormBuilder, FormGroup, Validators } from '@angular/forms';
import {
AuthRedirectService,
AuthService,
GlobalMessageService,
GlobalMessageType,
WindowRef,
} from '@spartacus/core';
import { Subscription } from 'rxjs';
import { CustomFormValidators } from '../../../shared/index';
@Component({
selector: 'cx-login-form',
templateUrl: './login-form.component.html',
})
export class LoginFormComponent implements OnInit, OnDestroy {
sub: Subscription;
loginForm: FormGroup;
constructor(
protected auth: AuthService,
protected globalMessageService: GlobalMessageService,
protected fb: FormBuilder,
protected authRedirectService: AuthRedirectService,
protected winRef: WindowRef
) {}
ngOnInit(): void {
const routeState = this.winRef.nativeWindow?.history?.state;
const prefilledEmail = routeState?.['newUid'];
this.loginForm = this.fb.group({
userId: [
prefilledEmail?.length ? prefilledEmail : '',
[Validators.required, CustomFormValidators.emailValidator],
],
password: ['', Validators.required],
});
}
submitForm(): void {
if (this.loginForm.valid) {
this.loginUser();
} else {
this.loginForm.markAllAsTouched();
}
}
ngOnDestroy(): void {
if (this.sub) {
this.sub.unsubscribe();
}
}
|
protected loginUser(): void {
const { userId, password } = this.loginForm.controls;
this.auth.authorize(
userId.value.toLowerCase(), // backend accepts lowercase emails only
password.value
);
if (!this.sub) {
this.sub = this.auth.getUserToken().subscribe((data) => {
if (data && data.access_token) {
this.globalMessageService.remove(GlobalMessageType.MSG_TYPE_ERROR);
this.authRedirectService.redirect();
}
});
}
}
}
| |
lib.rs
|
pub mod grid;
pub use grid::Grid;
|
||
traits.rs
|
/*!
Common traits for all libraries.
*/
use crate::{BorrowedPacket, DataLink, Error, InterfaceDescription, LibraryVersion, Stats};
use std::ffi::{CStr, CString};
use std::iter::IntoIterator;
use std::sync::Arc;
///Trait for structures representing an opened interface (or network card or network device)
///
/// Interfaces are opened using a concrete library - check the Library trait.
pub trait DynamicInterface<'a>: Send + Sync {
///Sends a raw packet.
fn send(&self, packet: &[u8]) -> Result<(), Error>;
///Receives a raw packet.
fn receive(&mut self) -> Result<BorrowedPacket<'_>, Error>;
///Flushes a queue
fn flush(&self);
///Provides information about the underlying technology used for this connection.
fn data_link(&self) -> DataLink;
///Provides transmission statistics
fn stats(&self) -> Result<Stats, Error>;
///Breaks previously started loops.
fn break_loop(&self);
/**
Runs infinite loop and passes received packets via callback.
Exits when the break_loop() function is called or on error.
*/
fn loop_infinite_dyn(&self, callback: &dyn FnMut(&BorrowedPacket)) -> Result<(), Error>;
///Set bpf filter.
fn set_filter(&mut self, filter: &str) -> Result<(), Error> {
let filter = CString::new(filter)?;
self.set_filter_cstr(&filter)
}
///Set bpf filter.
fn set_filter_cstr(&mut self, filter: &CStr) -> Result<(), Error>;
///Remove bpf filter.
fn remove_filter(&mut self) -> Result<(), Error>;
//TODO
//bpf filters
//receive/send with timeout
}
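// A minimal usage sketch (assumption, illustrative only and not part of this
// file): a `DynamicInterface` is normally obtained from a `Library` (defined
// below) and driven behind a trait object. The names mirror the
// `all_interfaces` doc example further down; `BorrowedPacket` is assumed to
// deref to `[u8]`.
//
// use tokio_rawsock::{open_best_library, traits::Library};
//
// fn capture_one() -> Result<(), tokio_rawsock::Error> {
//     let lib = open_best_library()?;
//     let name = lib.all_interfaces()?.first().unwrap().name.clone();
//     let mut interface = lib.open_interface(&name)?;
//     let packet = interface.receive()?;
//     println!("captured {} bytes", packet.len());
//     Ok(())
// }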
/**
Contains the static part of the interface trait.
Generic (template) functions cannot be used for dynamic dispatch (&dyn), so it was necessary to split the
interface trait into two parts - static and dynamic, depending on how the user uses the trait.
StaticInterface contains only the part of the trait that cannot be used dynamically.
*/
pub trait StaticInterface<'a>: DynamicInterface<'a> {
/**
Runs infinite loop and passes received packets via callback.
Exits when the break_loop() function is called or on error.
*/
fn loop_infinite<F>(&self, callback: F) -> Result<(), Error>
where
F: FnMut(&BorrowedPacket);
}
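// Why the split (illustrative comment, not used by the crate): a generic method
// such as `loop_infinite<F>` makes a trait non-object-safe, so it cannot be
// called through `&dyn`. Keeping it on a separate trait leaves
// `DynamicInterface` usable as a trait object:
//
// fn run_dyn(iface: &mut dyn DynamicInterface<'_>) { /* compiles */ }
// fn run_static(iface: &mut dyn StaticInterface<'_>) { /* error: not object safe */ }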
/// Trait for structures representing opened packet capture libraries.
///
/// There are several libraries that can be used among different platforms.
/// For example pcap.so, wpcap.dll or pfring.so.
/// This trait provides a consistent interface to all of them.
pub trait Library: Send + Sync {
//const DEFAULT_PATHS: &'static [&'static str];
    ///Opens this library by searching for the most common paths and names for the given platform
fn open_default_paths() -> Result<Self, Error>
where
Self: Sized,
{
Self::open_paths(Self::default_paths().iter().copied())
}
///Returns list of default paths to the library on the given platform.
fn default_paths() -> &'static [&'static str]
where
Self: Sized;
///Opens library searching in the list of provided paths.
fn open_paths<'b, T>(paths: T) -> Result<Self, Error>
where
Self: Sized,
T: IntoIterator<Item = &'b str>,
|
///Opens library by checking the provided path to it.
fn open(path: &str) -> Result<Self, Error>
where
Self: Sized;
///Opens interface (network card or network device) with the provided name.
///
/// You can obtain names of available devices by calling the all_interfaces() function.
fn open_interface<'a>(
&'a self,
name: &str,
) -> Result<Box<dyn DynamicInterface<'a> + 'a>, Error>;
fn open_interface_arc<'a>(
&'a self,
name: &str,
) -> Result<Arc<dyn DynamicInterface<'a> + 'a>, Error>;
/**
    Obtains the list of available network interfaces.
    Each of the returned interface names can then be used to open an interface.
    **Note:** each library may support a different set of interfaces.
    This is because different libraries support different network interface types and some of them
    add virtual interfaces to the list (such as the pcap "any" interface or pfring "zc:eth0").
    The same function called with the pcap library will return a different set of interfaces than when run with pfring.
    However, in both cases the returned interface list is supported by the currently used library.
# Example
```no_run
use tokio_rawsock::open_best_library;
use tokio_rawsock::traits::Library;
fn main(){
let lib = open_best_library().expect("Could not open any library.");
let interfs = lib.all_interfaces().expect("Could not obtain interface list");
for interf in &interfs{
println!("Found interface: {}", &interf.name);
}
let interf = lib.open_interface(&interfs.first().unwrap().name)
.expect("Could not open interface");
// do something with the interface
}
```
*/
fn all_interfaces(&self) -> Result<Vec<InterfaceDescription>, Error>;
///Returns library version
fn version(&self) -> LibraryVersion;
}
|
{
let mut err = Error::NoPathsProvided;
for path in paths.into_iter() {
match Self::open(path) {
Err(e) => err = e,
Ok(lib) => return Ok(lib),
}
}
Err(err)
}
|
glutin.rs
|
//! Window creation using glutin for gfx.
//!
//! # Examples
//!
//! The following code creates a `gfx::Surface` using glutin.
//!
//! ```no_run
//! extern crate glutin;
//! extern crate gfx_backend_gl;
//!
//! fn main() {
//! use gfx_backend_gl::Surface;
//! use glutin::{ContextBuilder, WindowedContext};
//! use glutin::window::WindowBuilder;
//! use glutin::event_loop::EventLoop;
//!
//! // First create a window using glutin.
//! let mut events_loop = EventLoop::new();
//! let wb = WindowBuilder::new();
//! let glutin_window = ContextBuilder::new().with_vsync(true).build_windowed(wb, &events_loop).unwrap();
//! let (glutin_context, glutin_window) = unsafe { glutin_window.make_current().expect("Failed to make the context current").split() };
//!
//! // Then use the glutin window to create a gfx surface.
//! let surface = Surface::from_context(glutin_context);
//! }
//! ```
//!
//! Headless initialization without a window.
//!
//! ```no_run
//! extern crate glutin;
//! extern crate gfx_backend_gl;
//! extern crate gfx_hal;
//!
//! use gfx_hal::Instance;
//! use gfx_backend_gl::Headless;
//! use glutin::{Context, ContextBuilder};
//! use glutin::event_loop::EventLoop;
//!
//! fn main() {
//! let events_loop = EventLoop::new();
//! let context = ContextBuilder::new().build_headless(&events_loop, glutin::dpi::PhysicalSize::new(0.0, 0.0))
//! .expect("Failed to build headless context");
//! let context = unsafe { context.make_current() }.expect("Failed to make the context current");
//! let headless = Headless::from_context(context);
//! let _adapters = headless.enumerate_adapters();
//! }
//! ```
use crate::{conv, native, Backend as B, Device, GlContainer, PhysicalDevice, QueueFamily, Starc};
use hal::{adapter::Adapter, format as f, image, window};
use std::ffi::c_void;
use std::os::raw::c_ulong;
use std::sync::Arc;
use arrayvec::ArrayVec;
use glow::HasContext;
use glutin::{self, platform::unix::RawContextExt};
use std::iter;
#[derive(Debug)]
pub struct Swapchain {
// Underlying window, required for presentation
pub(crate) context: Starc<glutin::RawContext<glutin::PossiblyCurrent>>,
// Extent because the window lies
pub(crate) extent: window::Extent2D,
///
pub(crate) fbos: ArrayVec<[native::RawFrameBuffer; 3]>,
}
#[derive(Debug)]
pub enum Instance {
Headless(Headless),
Surface(Surface),
}
impl Instance {
pub fn create_surface_from_wayland(
&self,
display: *mut c_void,
surface: *mut c_void,
) -> Surface {
log::trace!("Creating GL surface from wayland");
let context = unsafe {
glutin::ContextBuilder::new()
.with_vsync(true)
.build_raw_wayland_context(
display as _,
surface,
/*TODO: do something with these dimensions*/
400,
400,
)
.expect("TODO: handle this error")
};
let context = unsafe { context.make_current().expect("TODO: handle this error") };
Surface::from_context(context)
}
pub fn create_surface_from_xlib(&self, window: c_ulong, display: *mut c_void) -> Surface {
log::trace!("Creating GL surface from Xlib");
let xconn = {
            // This is taken from `glutin::platform::unix::x11::XConnection::new`, except with tweaks
// that allow us to create the connection with an existing display pointer
use glutin::platform::unix::x11::{ffi, XConnection};
// opening the libraries
let xlib = ffi::Xlib::open().expect("TODO: Handle error");
let xcursor = ffi::Xcursor::open().expect("TODO: Handle error");
let xrandr = ffi::Xrandr_2_2_0::open().expect("TODO: Handle error");
let xrandr_1_5 = ffi::Xrandr::open().ok();
let xinput2 = ffi::XInput2::open().expect("TODO: Handle error");
let xlib_xcb = ffi::Xlib_xcb::open().expect("TODO: Handle error");
let xrender = ffi::Xrender::open().expect("TODO: Handle error");
unsafe { (xlib.XInitThreads)() };
// unsafe { (xlib.XSetErrorHandler)(error_handler) };
// Get X11 socket file descriptor
let fd = unsafe { (xlib.XConnectionNumber)(display as *mut ffi::_XDisplay) };
XConnection {
xlib,
xrandr,
xrandr_1_5,
xcursor,
xinput2,
xlib_xcb,
xrender,
display: display as _,
x11_fd: fd,
latest_error: parking_lot::Mutex::new(None),
cursor_cache: Default::default(),
}
};
let xconn = Arc::new(xconn);
let context = unsafe {
glutin::ContextBuilder::new()
.with_vsync(true)
.build_raw_x11_context(xconn, window)
.expect("TODO: handle this error")
};
let context = unsafe { context.make_current().expect("TODO: handle this error") };
Surface::from_context(context)
}
}
impl hal::Instance<B> for Instance {
fn create(name: &str, version: u32) -> Result<Instance, hal::UnsupportedBackend> {
Headless::create(name, version).map(Instance::Headless)
}
fn enumerate_adapters(&self) -> Vec<Adapter<B>> {
match self {
Instance::Headless(instance) => instance.enumerate_adapters(),
Instance::Surface(instance) => instance.enumerate_adapters(),
}
}
unsafe fn create_surface(
&self,
has_handle: &impl raw_window_handle::HasRawWindowHandle,
) -> Result<Surface, hal::window::InitError> {
use raw_window_handle::RawWindowHandle;
match self {
Instance::Headless(instance) => instance.create_surface(has_handle),
Instance::Surface(instance) => instance.create_surface(has_handle),
}
.expect("TODO");
match has_handle.raw_window_handle() {
#[cfg(all(unix, not(android), not(macos)))]
RawWindowHandle::Wayland(handle) => {
Ok(self.create_surface_from_wayland(handle.display, handle.surface))
}
#[cfg(all(unix, not(android), not(macos)))]
RawWindowHandle::Xlib(handle) => {
Ok(self.create_surface_from_xlib(handle.window, handle.display))
}
_ => Err(hal::window::InitError::UnsupportedWindowHandle),
}
}
unsafe fn destroy_surface(&self, surface: Surface) {
match self {
Instance::Headless(instance) => instance.destroy_surface(surface),
Instance::Surface(instance) => instance.destroy_surface(surface),
}
}
}
//TODO: if we make `Surface` a `WindowBuilder` instead of `RawContext`,
// we could spawn window + GL context when a swapchain is requested
// and actually respect the swapchain configuration provided by the user.
#[derive(Debug)]
pub struct Surface {
pub(crate) context: Starc<glutin::RawContext<glutin::PossiblyCurrent>>,
pub(crate) swapchain: Option<Swapchain>,
renderbuffer: Option<native::Renderbuffer>,
}
impl Surface {
pub fn from_context(context: glutin::RawContext<glutin::PossiblyCurrent>) -> Self {
Surface {
renderbuffer: None,
swapchain: None,
context: Starc::new(context),
}
}
pub fn context(&self) -> &glutin::RawContext<glutin::PossiblyCurrent> {
&self.context
}
fn swapchain_formats(&self) -> Vec<f::Format> {
let pixel_format = self.context.get_pixel_format();
let color_bits = pixel_format.color_bits;
let alpha_bits = pixel_format.alpha_bits;
let srgb = pixel_format.srgb;
// TODO: expose more formats
match (color_bits, alpha_bits, srgb) {
(24, 8, true) => vec![f::Format::Rgba8Srgb, f::Format::Bgra8Srgb],
(24, 8, false) => vec![f::Format::Rgba8Unorm, f::Format::Bgra8Unorm],
_ => vec![],
}
}
}
impl window::PresentationSurface<B> for Surface {
type SwapchainImage = native::ImageView;
unsafe fn configure_swapchain(
&mut self,
device: &Device,
config: window::SwapchainConfig,
) -> Result<(), window::CreationError> {
let gl = &device.share.context;
if let Some(old) = self.swapchain.take() {
for fbo in old.fbos {
gl.delete_framebuffer(fbo);
}
}
if self.renderbuffer.is_none() {
self.renderbuffer = Some(gl.create_renderbuffer().unwrap());
}
let desc = conv::describe_format(config.format).unwrap();
gl.bind_renderbuffer(glow::RENDERBUFFER, self.renderbuffer);
gl.renderbuffer_storage(
glow::RENDERBUFFER,
desc.tex_internal,
config.extent.width as i32,
config.extent.height as i32,
);
let fbo = gl.create_framebuffer().unwrap();
gl.bind_framebuffer(glow::READ_FRAMEBUFFER, Some(fbo));
gl.framebuffer_renderbuffer(
glow::READ_FRAMEBUFFER,
glow::COLOR_ATTACHMENT0,
glow::RENDERBUFFER,
|
context: self.context.clone(),
extent: config.extent,
fbos: iter::once(fbo).collect(),
});
Ok(())
}
unsafe fn unconfigure_swapchain(&mut self, device: &Device) {
let gl = &device.share.context;
if let Some(old) = self.swapchain.take() {
for fbo in old.fbos {
gl.delete_framebuffer(fbo);
}
}
if let Some(rbo) = self.renderbuffer.take() {
gl.delete_renderbuffer(rbo);
}
}
unsafe fn acquire_image(
&mut self,
_timeout_ns: u64,
) -> Result<(Self::SwapchainImage, Option<window::Suboptimal>), window::AcquireError> {
let image = native::ImageView::Renderbuffer(self.renderbuffer.unwrap());
Ok((image, None))
}
}
impl window::Surface<B> for Surface {
fn supports_queue_family(&self, _: &QueueFamily) -> bool {
true
}
fn capabilities(&self, _physical_device: &PhysicalDevice) -> window::SurfaceCapabilities {
window::SurfaceCapabilities {
present_modes: window::PresentMode::FIFO, //TODO
composite_alpha_modes: window::CompositeAlphaMode::OPAQUE, //TODO
image_count: if self.context.get_pixel_format().double_buffer {
2..=2
} else {
1..=1
},
current_extent: None,
extents: window::Extent2D {
width: 4,
height: 4,
}..=window::Extent2D {
width: 4096,
height: 4096,
},
max_image_layers: 1,
usage: image::Usage::COLOR_ATTACHMENT | image::Usage::TRANSFER_SRC,
}
}
fn supported_formats(&self, _physical_device: &PhysicalDevice) -> Option<Vec<f::Format>> {
Some(self.swapchain_formats())
}
}
impl hal::Instance<B> for Surface {
fn create(_: &str, _: u32) -> Result<Self, hal::UnsupportedBackend> {
panic!("Unable to create a surface")
}
fn enumerate_adapters(&self) -> Vec<Adapter<B>> {
let adapter = PhysicalDevice::new_adapter(
(),
GlContainer::from_fn_proc(|s| self.context.get_proc_address(s) as *const _),
);
vec![adapter]
}
unsafe fn create_surface(
&self,
_: &impl raw_window_handle::HasRawWindowHandle,
) -> Result<Surface, window::InitError> {
unimplemented!()
}
unsafe fn destroy_surface(&self, _surface: Surface) {
// TODO: Implement Surface cleanup
}
}
// This isn't used anymore according to the linter. Keeping it around just in case.
pub fn config_context<C>(
builder: glutin::ContextBuilder<C>,
color_format: f::Format,
ds_format: Option<f::Format>,
) -> glutin::ContextBuilder<C>
where
C: glutin::ContextCurrentState,
{
let color_base = color_format.base_format();
let color_bits = color_base.0.describe_bits();
let depth_bits = match ds_format {
Some(fm) => fm.base_format().0.describe_bits(),
None => f::BITS_ZERO,
};
builder
.with_depth_buffer(depth_bits.depth)
.with_stencil_buffer(depth_bits.stencil)
.with_pixel_format(color_bits.color, color_bits.alpha)
.with_srgb(color_base.1 == f::ChannelType::Srgb)
}
#[derive(Debug)]
pub struct Headless(pub Starc<glutin::Context<glutin::PossiblyCurrent>>);
impl Headless {
pub fn from_context(context: glutin::Context<glutin::PossiblyCurrent>) -> Headless {
Headless(Starc::new(context))
}
}
impl hal::Instance<B> for Headless {
fn create(_: &str, _: u32) -> Result<Self, hal::UnsupportedBackend> {
let context: glutin::Context<glutin::NotCurrent>;
#[cfg(linux)]
{
            // TODO: Update portability to make this more flexible
use glutin::platform::unix::HeadlessContextExt;
let size = glutin::dpi::PhysicalSize::from((800, 600));
let builder = glutin::ContextBuilder::new().with_hardware_acceleration(Some(false));
context = HeadlessContextExt::build_osmesa(builder, size).map_err(|e| {
info!("Headless context error {:?}", e);
hal::UnsupportedBackend
})?;
}
#[cfg(not(linux))]
{
context = unimplemented!();
}
let context = unsafe { context.make_current() }.expect("failed to make context current");
Ok(Headless::from_context(context))
}
fn enumerate_adapters(&self) -> Vec<Adapter<B>> {
let adapter = PhysicalDevice::new_adapter(
(),
GlContainer::from_fn_proc(|s| self.0.get_proc_address(s) as *const _),
);
vec![adapter]
}
unsafe fn create_surface(
&self,
_: &impl raw_window_handle::HasRawWindowHandle,
) -> Result<Surface, window::InitError> {
unimplemented!()
}
unsafe fn destroy_surface(&self, _surface: Surface) {
// TODO: Implement Surface cleanup
}
}
|
self.renderbuffer,
);
self.swapchain = Some(Swapchain {
|
parseDataToGallery.ts
|
import { TTMDBResponse } from 'api/types';
import createImagePath from 'helpers/createImagePath';
import { TMovie } from 'types';
const parseDataToGallery = (data?: TTMDBResponse): TMovie[] => {
if (!data) {
return [];
}
|
return data.results?.map(({ id, poster_path, title, overview }) => ({
id,
title,
description: overview,
poster: createImagePath(poster_path),
}));
};
export default parseDataToGallery;
| |
clean.py
|
# -*- coding: utf-8 -*-
"""
chemdataextractor.scrape.clean
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tools for cleaning up XML/HTML by removing tags entirely or replacing with their contents.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import copy
import logging
import re
from lxml.etree import fromstring, tostring
from lxml.html import fromstring as html_fromstring
import six
from . import BLOCK_ELEMENTS
log = logging.getLogger(__name__)
class Cleaner(object):
"""Clean HTML or XML by removing tags completely or replacing with their contents.
A Cleaner instance provides a ``clean_markup`` method::
cleaner = Cleaner()
htmlstring = '<html><body><script>alert("test")</script><p>Some text</p></body></html>'
print(cleaner.clean_markup(htmlstring))
A Cleaner instance is also a callable that can be applied to lxml document trees::
tree = lxml.etree.fromstring(htmlstring)
cleaner(tree)
print(lxml.etree.tostring(tree))
Elements that are matched by ``kill_xpath`` are removed entirely, along with their contents. By default,
``kill_xpath`` matches all script and style tags, as well as comments and processing instructions.
Elements that are matched by ``strip_xpath`` are replaced with their contents. By default, no elements are stripped.
A common use-case is to set ``strip_xpath`` to ``.//*``, which specifies that all elements should be stripped.
Elements that are matched by ``allow_xpath`` are excepted from stripping, even if they are also matched by
    ``strip_xpath``. This is useful when setting ``strip_xpath`` to strip all tags, allowing a few exceptions to be
specified by ``allow_xpath``.
"""
kill_xpath = './/script | .//style | .//comment() | .//processing-instruction() | .//*[@style="display:none;"]'
strip_xpath = None
allow_xpath = None
fix_whitespace = True
namespaces = {
're': 'http://exslt.org/regular-expressions',
'set': 'http://exslt.org/sets',
'dc': 'http://purl.org/dc/elements/1.1/',
'prism': 'http://prismstandard.org/namespaces/basic/2.0/',
'xml': 'http://www.w3.org/XML/1998/namespace',
'rdf': 'http://www.w3.org/1999/02/22-rdf-syntax-ns',
}
def __init__(self, **kwargs):
"""Behaviour can be customized by overriding attributes in a subclass or setting them in the constructor.
:param string kill_xpath: XPath expression for tags to remove along with their contents.
:param string strip_xpath: XPath expression for tags to replace with their contents.
:param string allow_xpath: XPath expression for tags to except from strip_xpath.
:param bool fix_whitespace: Normalize whitespace to a single space and ensure newlines around block elements.
:param dict namespaces: Namespace prefixes to register for the XPaths.
"""
        # TODO: This is weird. Why not change to proper individual keyword arguments, with the class attributes as defaults?
for name, value in kwargs.items():
if not hasattr(self, name):
raise TypeError('Unknown parameter: %s=%r' % (name, value))
setattr(self, name, value)
def __call__(self, doc):
"""Clean the document."""
if hasattr(doc, 'getroot'):
doc = doc.getroot()
if self.fix_whitespace:
# Ensure newlines around block elements
for el in doc.iterdescendants():
if el.tag in BLOCK_ELEMENTS:
el.tail = (el.tail or '') + '\n'
previous = el.getprevious()
parent = el.getparent()
if previous is None:
parent.text = (parent.text or '') + '\n'
else:
previous.tail = (previous.tail or '') + '\n'
# Remove elements that match kill_xpath
if self.kill_xpath:
for el in doc.xpath(self.kill_xpath, namespaces=self.namespaces):
#log.debug('Killing: %s' % tostring(el))
parent = el.getparent()
# We can't kill the root element!
if parent is None:
continue
if el.tail:
previous = el.getprevious()
|
if previous is None:
parent.text = (parent.text or '') + el.tail
else:
previous.tail = (previous.tail or '') + el.tail
parent.remove(el)
# Collect all the allowed elements
to_keep = [el for el in doc.xpath(self.allow_xpath, namespaces=self.namespaces)] if self.allow_xpath else []
# Replace elements that match strip_xpath with their contents
if self.strip_xpath:
for el in doc.xpath(self.strip_xpath, namespaces=self.namespaces):
# Skip if allowed by allow_xpath
if el in to_keep:
continue
parent = el.getparent()
previous = el.getprevious()
# We can't strip the root element!
if parent is None:
continue
# Append the text to previous tail (or parent text if no previous), ensuring newline if block level
if el.text and isinstance(el.tag, six.string_types):
if previous is None:
parent.text = (parent.text or '') + el.text
else:
previous.tail = (previous.tail or '') + el.text
# Append the tail to last child tail, or previous tail, or parent text, ensuring newline if block level
if el.tail:
if len(el):
last = el[-1]
last.tail = (last.tail or '') + el.tail
elif previous is None:
parent.text = (parent.text or '') + el.tail
else:
previous.tail = (previous.tail or '') + el.tail
index = parent.index(el)
parent[index:index+1] = el[:]
# Collapse whitespace down to a single space or a single newline
if self.fix_whitespace:
for el in doc.iter():
if el.text is not None:
el.text = re.sub(r'\s*\n\s*', '\n', el.text)
el.text = re.sub(r'[ \t]+', ' ', el.text)
# el.text = re.sub(r'\s+', ' ', el.text)
if el.tail is not None:
el.tail = re.sub(r'\s*\n\s*', '\n', el.tail)
el.tail = re.sub(r'[ \t]+', ' ', el.tail)
# el.tail = re.sub(r'\s+', ' ', el.tail)
def clean_html(self, html):
"""Apply ``Cleaner`` to HTML string or document and return a cleaned string or document."""
result_type = type(html)
if isinstance(html, six.string_types):
doc = html_fromstring(html)
else:
doc = copy.deepcopy(html)
self(doc)
if issubclass(result_type, six.binary_type):
return tostring(doc, encoding='utf-8')
elif issubclass(result_type, six.text_type):
return tostring(doc, encoding='unicode')
else:
return doc
def clean_markup(self, markup, parser=None):
"""Apply ``Cleaner`` to markup string or document and return a cleaned string or document."""
result_type = type(markup)
if isinstance(markup, six.string_types):
doc = fromstring(markup, parser=parser)
else:
doc = copy.deepcopy(markup)
self(doc)
if issubclass(result_type, six.binary_type):
return tostring(doc, encoding='utf-8')
elif issubclass(result_type, six.text_type):
return tostring(doc, encoding='unicode')
else:
return doc
#: A default Cleaner instance, which kills comments, processing instructions, script tags, style tags.
clean = Cleaner()
#: Convenience function for applying ``clean`` to a string.
clean_markup = clean.clean_markup
#: Convenience function for applying ``clean`` to a HTML string.
clean_html = clean.clean_html
#: A Cleaner instance that is configured to strip all tags, replacing them with their text contents.
strip = Cleaner(strip_xpath='.//*')
#: Convenience function for applying ``strip`` to a string.
strip_markup = strip.clean_markup
#: Convenience function for applying ``strip`` to a HTML string.
strip_html = strip.clean_html
| |
cpuusage.go
|
// +build netbsd
package netbsd
import (
"fmt"
"os/exec"
"strconv"
"strings"
"github.com/mackerelio/mackerel-agent/logging"
"github.com/mackerelio/mackerel-agent/metrics"
)
// CPUUsageGenerator collects CPU usage metrics of the host.
type CPUUsageGenerator struct {
}
var cpuUsageLogger = logging.GetLogger("metrics.cpuUsage")
var iostatFieldToMetricName = []string{"user", "nice", "system", "interrupt", "idle"}
// Generate returns current CPU usage of the host.
// Keys below are expected:
// - cpu.user.percentage
// - cpu.system.percentage
// - cpu.idle.percentage
func (g *CPUUsageGenerator) Generate() (metrics.Values, error) {
// % iostat -c2 -C
// CPU
// us ni sy in id
// 0 0 0 0 100
// 0 0 0 0 100
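	// Illustratively, the second sample row above ("0 0 0 0 100") parses into
	// cpu.user.percentage=0, cpu.nice.percentage=0, cpu.system.percentage=0 and
	// cpu.idle.percentage=100; the "interrupt" column is skipped below.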
iostatBytes, err := exec.Command("iostat", "-c2", "-d", "-C").Output()
if err != nil {
cpuUsageLogger.Errorf("Failed to invoke iostat: %s", err)
return nil, err
}
iostat := string(iostatBytes)
|
}
fields := strings.Fields(lines[3])
if len(fields) < len(iostatFieldToMetricName) {
return nil, fmt.Errorf("iostat result malformed: [%q]", iostat)
}
cpuUsage := make(map[string]float64, len(iostatFieldToMetricName))
for i, n := range iostatFieldToMetricName {
if i == 3 {
continue
}
value, err := strconv.ParseFloat(fields[i], 64)
if err != nil {
return nil, err
}
cpuUsage["cpu."+n+".percentage"] = value
}
return metrics.Values(cpuUsage), nil
}
|
lines := strings.Split(iostat, "\n")
if len(lines) != 5 {
return nil, fmt.Errorf("iostat result malformed: [%q]", iostat)
|
test.js
|
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
var Foo2 = (function () {
function
|
(x) {
this.foo = x;
}
Foo2.prototype.aaa = function () {
};
Foo2.prototype._bbb = function () {
console.log(this);
};
return Foo2;
}());
var f = new Foo2('ok');
var Goo = (function (_super) {
__extends(Goo, _super);
function Goo() {
var _this = _super.call(this, 'x') || this;
_this.great = 5;
return _this;
}
Goo.prototype.blup = function () {
};
return Goo;
}(Foo2));
|
Foo2
|
templates.go
|
package runner
import (
"fmt"
"os"
"strings"
"github.com/karrick/godirwalk"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v2/internal/severity"
"github.com/projectdiscovery/nuclei/v2/pkg/parsers"
"github.com/projectdiscovery/nuclei/v2/pkg/types"
)
func (r *Runner) templateLogMsg(id, name, author string, templateSeverity severity.Severity) string {
// Display the message for the template
return fmt.Sprintf("[%s] %s (%s) [%s]",
r.colorizer.BrightBlue(id).String(),
r.colorizer.Bold(name).String(),
r.colorizer.BrightYellow(appendAtSignToAuthors(author)).String(),
r.addColor(templateSeverity))
}
// appendAtSignToAuthors appends @ before each author and returns final string
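// Illustrative example (assuming comma-separated input): "alice,@bob" becomes "@alice,@bob".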
func appendAtSignToAuthors(author string) string
|
func (r *Runner) logAvailableTemplate(tplPath string) {
t, err := parsers.ParseTemplate(tplPath)
if err != nil {
gologger.Error().Msgf("Could not parse file '%s': %s\n", tplPath, err)
} else {
gologger.Print().Msgf("%s\n", r.templateLogMsg(t.ID,
types.ToString(t.Info.Name),
types.ToString(t.Info.Authors),
t.Info.SeverityHolder.Severity))
}
}
// ListAvailableTemplates prints available templates to stdout
func (r *Runner) listAvailableTemplates() {
if r.templatesConfig == nil {
return
}
if _, err := os.Stat(r.templatesConfig.TemplatesDirectory); os.IsNotExist(err) {
		gologger.Error().Msgf("%s does not exist", r.templatesConfig.TemplatesDirectory)
return
}
gologger.Print().Msgf(
"\nListing available v.%s nuclei templates for %s",
r.templatesConfig.CurrentVersion,
r.templatesConfig.TemplatesDirectory,
)
err := directoryWalker(
r.templatesConfig.TemplatesDirectory,
func(path string, d *godirwalk.Dirent) error {
if d.IsDir() && path != r.templatesConfig.TemplatesDirectory {
gologger.Print().Msgf("\n%s:\n\n", r.colorizer.Bold(r.colorizer.BgBrightBlue(d.Name())).String())
} else if strings.HasSuffix(path, ".yaml") {
r.logAvailableTemplate(path)
}
return nil
},
)
// directory couldn't be walked
if err != nil {
gologger.Error().Msgf("Could not find templates in directory '%s': %s\n", r.templatesConfig.TemplatesDirectory, err)
}
}
func directoryWalker(fsPath string, callback func(fsPath string, d *godirwalk.Dirent) error) error {
return godirwalk.Walk(fsPath, &godirwalk.Options{
Callback: callback,
ErrorCallback: func(fsPath string, err error) godirwalk.ErrorAction {
return godirwalk.SkipNode
},
Unsorted: true,
})
}
|
{
authors := strings.Split(author, ",")
if len(authors) == 0 {
return "@none"
}
if len(authors) == 1 {
if !strings.HasPrefix(authors[0], "@") {
return fmt.Sprintf("@%s", authors[0])
}
return authors[0]
}
values := make([]string, 0, len(authors))
for _, k := range authors {
		if !strings.HasPrefix(k, "@") {
values = append(values, fmt.Sprintf("@%s", k))
} else {
values = append(values, k)
}
}
return strings.Join(values, ",")
}
|
domain.py
|
# Copyright 2017 Joachim van der Herten
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from itertools import chain
from gpflow.param import Parentable
from .transforms import LinearTransform
class Domain(Parentable):
"""
    A domain representing the mathematical space over which optimization is performed.
"""
def __init__(self, parameters):
super(Domain, self).__init__()
self._parameters = parameters
@property
def lower(self):
"""
Lower bound of the domain, corresponding to a numpy array with the lower value of each parameter
"""
return np.array(list(map(lambda param: param.lower, self._parameters))).flatten()
@property
def upper(self):
"""
Upper bound of the domain, corresponding to a numpy array with the upper value of each parameter
"""
return np.array(list(map(lambda param: param.upper, self._parameters))).flatten()
def __add__(self, other):
assert isinstance(other, Domain)
return Domain(self._parameters + other._parameters)
@property
def size(self):
"""
Returns the dimensionality of the domain
"""
return sum(map(lambda param: param.size, self._parameters))
def __setattr__(self, key, value):
super(Domain, self).__setattr__(key, value)
        if key != '_parent':
if isinstance(value, Parentable):
value._parent = self
if isinstance(value, list):
for val in (x for x in value if isinstance(x, Parentable)):
val._parent = self
def __eq__(self, other):
return self._parameters == other._parameters
def __contains__(self, X):
|
def __iter__(self):
for v in chain(*map(iter, self._parameters)):
yield v
def __getitem__(self, items):
if isinstance(items, list):
return np.sum([self[item] for item in items])
if isinstance(items, str):
labels = [param.label for param in self._parameters]
items = labels.index(items)
return self._parameters[items]
def __rshift__(self, other):
assert(self.size == other.size)
A = (other.upper - other.lower) / (self.upper - self.lower)
b = -self.upper * A + other.upper
return LinearTransform(A, b)
@property
def value(self):
return np.vstack(map(lambda p: p.value, self._parameters)).T
@value.setter
def value(self, x):
x = np.atleast_2d(x)
assert (len(x.shape) == 2)
assert (x.shape[1] == self.size)
offset = 0
for p in self._parameters:
p.value = x[:, offset:offset + p.size]
offset += p.size
def _repr_html_(self):
"""
Build html string for table display in jupyter notebooks.
"""
html = ["<table id='domain' width=100%>"]
# Table header
columns = ['Name', 'Type', 'Values']
header = "<tr>"
header += ''.join(map(lambda l: "<td>{0}</td>".format(l), columns))
header += "</tr>"
html.append(header)
# Add parameters
html.append(self._html_table_rows())
html.append("</table>")
return ''.join(html)
def _html_table_rows(self):
return ''.join(map(lambda l: l._html_table_rows(), self._parameters))
class Parameter(Domain):
"""
Abstract class representing a parameter (which corresponds to a one-dimensional domain)
This class can be derived for continuous, discrete and categorical parameters
"""
def __init__(self, label, xinit):
super(Parameter, self).__init__([self])
self.label = label
self._x = np.atleast_1d(xinit)
@Domain.size.getter
def size(self):
"""
One parameter has a dimensionality of 1
:return: 1
"""
return 1
def __iter__(self):
yield self
@Domain.value.getter
def value(self):
return self._x
@value.setter
def value(self, x):
x = np.atleast_1d(x)
self._x = x.ravel()
def _html_table_rows(self):
"""
Html row representation of a Parameter. Should be overwritten in subclasses objects.
"""
return "<tr><td>{0}</td><td>{1}</td><td>{2}</td></tr>".format(self.label, 'N/A', 'N/A')
class ContinuousParameter(Parameter):
def __init__(self, label, lb, ub, xinit=None):
self._range = np.array([lb, ub], dtype=float)
        super(ContinuousParameter, self).__init__(label, xinit if xinit is not None else (ub + lb) / 2.0)
@Parameter.lower.getter
def lower(self):
return np.array([self._range[0]])
@Parameter.upper.getter
def upper(self):
return np.array([self._range[1]])
@lower.setter
def lower(self, value):
self._range[0] = value
@upper.setter
def upper(self, value):
self._range[1] = value
def __eq__(self, other):
return isinstance(other, ContinuousParameter) and self.lower == other.lower and self.upper == other.upper
def _html_table_rows(self):
"""
Html row representation of a ContinuousParameter.
"""
return "<tr><td>{0}</td><td>{1}</td><td>{2}</td></tr>".format(self.label, 'Continuous', str(self._range))
class UnitCube(Domain):
"""
The unit domain [0, 1]^d
"""
def __init__(self, n_inputs):
params = [ContinuousParameter('u{0}'.format(i), 0, 1) for i in np.arange(n_inputs)]
super(UnitCube, self).__init__(params)
|
X = np.atleast_2d(X)
        if X.shape[1] != self.size:
return False
return np.all(np.logical_and(np.logical_or(self.lower < X, np.isclose(self.lower, X)),
np.logical_or(X < self.upper, np.isclose(self.upper, X))))
|
ha-logbook.ts
|
import "../../components/ha-icon";
import { formatTimeWithSeconds } from "../../common/datetime/format_time";
import { formatDate } from "../../common/datetime/format_date";
import { domainIcon } from "../../common/entity/domain_icon";
import { stateIcon } from "../../common/entity/state_icon";
import { computeRTL } from "../../common/util/compute_rtl";
import {
LitElement,
html,
property,
TemplateResult,
CSSResult,
css,
PropertyValues,
} from "lit-element";
import { HomeAssistant } from "../../types";
import { fireEvent } from "../../common/dom/fire_event";
import "lit-virtualizer";
import { LogbookEntry } from "../../data/logbook";
class HaLogbook extends LitElement {
@property() public hass!: HomeAssistant;
@property() public entries: LogbookEntry[] = [];
@property({ attribute: "rtl", type: Boolean, reflect: true })
// @ts-ignore
private _rtl = false;
protected shouldUpdate(changedProps: PropertyValues) {
const oldHass = changedProps.get("hass") as HomeAssistant | undefined;
const languageChanged =
oldHass === undefined || oldHass.language !== this.hass.language;
return changedProps.has("entries") || languageChanged;
}
protected updated(_changedProps: PropertyValues) {
this._rtl = computeRTL(this.hass);
}
protected render(): TemplateResult {
if (!this.entries?.length) {
return html`
${this.hass.localize("ui.panel.logbook.entries_not_found")}
`;
}
return html`
<lit-virtualizer
.items=${this.entries}
.renderItem=${(item: LogbookEntry, index: number) =>
this._renderLogbookItem(item, index)}
style="height: 100%;"
></lit-virtualizer>
`;
}
private _renderLogbookItem(
item: LogbookEntry,
index: number
): TemplateResult {
const previous = this.entries[index - 1];
const state = item.entity_id ? this.hass.states[item.entity_id] : undefined;
return html`
<div>
${index === 0 ||
(item?.when &&
previous?.when &&
new Date(item.when).toDateString() !==
new Date(previous.when).toDateString())
? html`
<h4 class="date">
${formatDate(new Date(item.when), this.hass.language)}
</h4>
`
: html``}
<div class="entry">
<div class="time">
${formatTimeWithSeconds(new Date(item.when), this.hass.language)}
</div>
<ha-icon
.icon=${state ? stateIcon(state) : domainIcon(item.domain)}
></ha-icon>
<div class="message">
${!item.entity_id
? html`
<span class="name">${item.name}</span>
`
: html`
<a
href="#"
@click=${this._entityClicked}
.entityId=${item.entity_id}
class="name"
>
${item.name}
</a>
`}
<span>${item.message}</span>
</div>
</div>
</div>
`;
}
private _entityClicked(ev: Event) {
ev.preventDefault();
fireEvent(this, "hass-more-info", {
entityId: (ev.target as any).entityId,
});
}
static get styles(): CSSResult {
return css`
:host {
display: block;
height: 100%;
}
:host([rtl]) {
direction: ltr;
}
.entry {
display: flex;
line-height: 2em;
}
.time {
width: 65px;
flex-shrink: 0;
font-size: 0.8em;
color: var(--secondary-text-color);
}
:host([rtl]) .date {
direction: rtl;
}
ha-icon {
|
}
.message {
color: var(--primary-text-color);
}
a {
color: var(--primary-color);
}
`;
}
}
customElements.define("ha-logbook", HaLogbook);
|
margin: 0 8px 0 16px;
flex-shrink: 0;
color: var(--primary-text-color);
|
repo_participant.rs
|
use std::path::Path;
use crate::metrics;
use crate::report::repo_info::RepoInfo;
use crate::report::Report;
use crate::util::percentage;
use fehler::throws;
use serde::Deserialize;
use stable_eyre::eyre::{Error, WrapErr};
use super::ReportConfig;
#[derive(Debug, Deserialize)]
pub struct RepoParticipants {
pub participants: Vec<RepoParticipant>,
}
#[derive(Debug, Deserialize)]
pub struct RepoParticipant {
#[serde(rename = "#")]
pub row: usize,
#[serde(rename = "Participant")]
pub participant: String,
#[serde(rename = "Repository")]
pub repo: String,
#[serde(rename = "PRs participated in")]
pub participated_in: u64,
#[serde(rename = "PRs authored")]
pub authored: u64,
#[serde(rename = "PRs reviewed")]
pub reviewed: u64,
#[serde(rename = "PRs resolved")]
pub resolved: u64,
}
impl Report {
#[throws]
pub(super) async fn repo_participants(&self, config: &ReportConfig) -> RepoParticipants {
let input_dir = self.input_dir();
let repo_participants = input_dir.join("repo-participants.csv");
let graphql = self.graphql("repo-participants");
self.produce_input(
&repo_participants,
metrics::RepoParticipants::new(
graphql,
config.github.org.clone(),
config.github.repos.clone(),
config.data_source.start_date.clone(),
config.data_source.end_date.clone(),
),
)
.await
.wrap_err_with(|| format!("Failed to produce input data for {:?}", &repo_participants))?;
tokio::task::spawn_blocking(move || {
RepoParticipants::parse_participants(&repo_participants)
})
.await
.wrap_err("Failed to parse repo participants")??
}
}
impl RepoParticipants {
#[throws]
fn parse_participants(repo_participants: &Path) -> Self {
let mut rdr = csv::Reader::from_path(repo_participants).wrap_err_with(|| {
format!("Failed to create reader from path {:?}", &repo_participants)
})?;
let mut vec = Vec::new();
for result in rdr.deserialize() {
let record: RepoParticipant =
|
}
}
Self { participants: vec }
}
/// Finds the participant with the maximum value for `key`.
pub fn top_participant(
&self,
repo_info: &RepoInfo,
key: impl Fn(&RepoParticipant) -> u64,
) -> (String, u64) {
match self.in_repo(repo_info).max_by_key(|p| key(p)) {
Some(p) => (p.participant.clone(), percentage(key(p), repo_info.num_prs)),
None => ("N/A".to_string(), 0),
}
}
    /// Returns an iterator over the participants that belong to the given repository.
pub fn in_repo<'me>(
&'me self,
repo_info: &'me RepoInfo,
) -> impl Iterator<Item = &'me RepoParticipant> + 'me {
self.participants
.iter()
.filter(move |p| p.repo == repo_info.repo)
}
}
impl RepoParticipant {
pub(super) fn reviewed_or_resolved(&self) -> u64 {
self.reviewed.max(self.resolved)
}
}
fn is_robot(login: &str) -> bool {
// FIXME: move to configuration
const ROBOTS: &[&str] = &[
"rust-highfive",
"bors",
"rustbot",
"rust-log-analyzer",
"rust-timer",
"rfcbot",
];
ROBOTS.contains(&login)
}
|
result.wrap_err("Failed to deserialize while parsing repo participants")?;
if !is_robot(&record.participant) {
vec.push(record);
|
models.ts
|
type BaseProps = {
networks?: any;
viewType?: "list" | "grid";
|
selectedNetworkIds?: string[];
isFilterChanged?: boolean;
};
export type FilterWalletsHookProps = {
update: (key: string, value: any) => void;
disabled: boolean;
} & BaseProps;
export type FilterWalletsProps = {
onChange?: (key: string, value: any) => void;
} & BaseProps;
|
useTestNetworks?: boolean;
walletsDisplayType?: string;
|
info.js
|
import Taro, { Component } from '@tarojs/taro'
import { View, Image } from '@tarojs/components'
import { connect } from '@tarojs/redux'
import { AtButton, AtInput, AtForm } from 'taro-ui'
import './index.scss'
@connect(({ bg, info }) => ({
...bg,
...info,
}))
export default class Info extends Component {
config = {
navigationBarTitleText: '戒烟助理'
};
componentWillMount () {
}
onAmount = e => {
console.log(e)
this.props.dispatch({
type: 'info/save',
payload: {
amount: e
},
})
};
onUnitprice = e => {
console.log(e)
this.props.dispatch({
type: 'info/save',
payload: {
unitprice: e
},
})
};
onUnitamount = e => {
console.log(e)
this.props.dispatch({
type: 'info/save',
payload: {
unitamount: e
},
})
};
onSubmit = () => {
if (this.props.amount &&
this.props.unitprice &&
this.props.unitamount) {
Taro.setStorageSync('amount', this.props.amount)
      Taro.setStorageSync('unitprice', this.props.unitprice)
      Taro.setStorageSync('unitamount', this.props.unitamount)
Taro.setStorageSync('days', 0)
Taro.navigateTo({
url: '/pages/quitSmoking/doing'
})
} else {
Taro.showModal({ content: `请输入评估信息`, showCancel: false })
}
};
onBack () {
|
Taro.navigateBack()
}
render () {
const { amount, unitprice, unitamount, imgList } = this.props
return (
<View className='body'>
<Image className='background' src={imgList[Math.floor(Math.random() * 3)].hoverURL}></Image>
<View className='main'>
<View className='panel'>
<View className='component-item'>
<AtForm>
<AtInput name='amount' title='支数' type='number' placeholder='每天吸烟支数' value={amount} onChange={this.onAmount} />
<AtInput name='unitprice' title='单价' type='number' placeholder='香烟单价' value={unitprice} onChange={this.onUnitprice} />
<AtInput name='unitamount' title='每包支数' type='number' placeholder='每包烟支数' value={unitamount} onChange={this.onUnitamount} />
<View className='btn-item'>
<AtButton type='primary' onClick={this.onSubmit}>提交</AtButton>
<AtButton type='secondary' onClick={this.onBack}>返回</AtButton>
</View>
</AtForm>
</View>
</View>
</View>
</View>
)
}
}
| |
cli-test.gradle.spec.ts
|
import * as sinon from 'sinon';
import { legacyPlugin as pluginApi } from '@snyk/cli-interface';
import { AcceptanceTests } from './cli-test.acceptance.test';
import { CommandResult } from '../../../src/cli/commands/types';
import { createCallGraph } from '../../utils';
import * as fs from 'fs';
import * as path from 'path';
const readJSON = (jsonPath: string) => {
return JSON.parse(
fs.readFileSync(path.resolve(__dirname, jsonPath), 'utf-8'),
);
};
export const GradleTests: AcceptanceTests = {
language: 'Gradle',
tests: {
'`test gradle-kotlin-dsl-app` returns correct meta': (
params,
utils,
) => async (t) => {
utils.chdirWorkspaces();
const plugin = {
async inspect() {
return {
package: {},
plugin: { name: 'testplugin', runtime: 'testruntime' },
};
},
};
sinon.spy(plugin, 'inspect');
const loadPlugin = sinon.stub(params.plugins, 'loadPlugin');
t.teardown(loadPlugin.restore);
loadPlugin.withArgs('gradle').returns(plugin);
const commandResult: CommandResult = await params.cli.test(
'gradle-kotlin-dsl-app',
);
const res: string = commandResult.getDisplayResults();
const meta = res.slice(res.indexOf('Organization:')).split('\n');
t.match(meta[0], /Organization:\s+test-org/, 'organization displayed');
t.match(
meta[1],
/Package manager:\s+gradle/,
'package manager displayed',
);
t.match(
meta[2],
/Target file:\s+build.gradle.kts/,
'target file displayed',
);
t.match(meta[3], /Open source:\s+no/, 'open source displayed');
t.match(
meta[4],
/Project path:\s+gradle-kotlin-dsl-app/,
'path displayed',
);
t.notMatch(
meta[5],
/Local Snyk policy:\s+found/,
'local policy not displayed',
);
},
'`test gradle-app` returns correct meta': (params, utils) => async (t) => {
utils.chdirWorkspaces();
const plugin = {
async inspect() {
return {
package: {},
plugin: { name: 'testplugin', runtime: 'testruntime' },
};
},
};
const spyPlugin = sinon.spy(plugin, 'inspect');
const loadPlugin = sinon.stub(params.plugins, 'loadPlugin');
t.teardown(loadPlugin.restore);
loadPlugin.withArgs('gradle').returns(plugin);
const commandResult: CommandResult = await params.cli.test('gradle-app');
const res = commandResult.getDisplayResults();
const meta = res.slice(res.indexOf('Organization:')).split('\n');
t.false(
((spyPlugin.args[0] as any)[2] as any).allSubProjects,
'`allSubProjects` option is not sent',
);
t.match(meta[0], /Organization:\s+test-org/, 'organization displayed');
t.match(
meta[1],
/Package manager:\s+gradle/,
'package manager displayed',
);
t.match(meta[2], /Target file:\s+build.gradle/, 'target file displayed');
t.match(meta[3], /Open source:\s+no/, 'open source displayed');
t.match(meta[4], /Project path:\s+gradle-app/, 'path displayed');
t.notMatch(
meta[5],
/Local Snyk policy:\s+found/,
'local policy not displayed',
);
},
'`test gradle-app --reachable-vulns` sends call graph': (
params,
utils,
) => async (t) => {
utils.chdirWorkspaces();
const callGraphPayload = readJSON('../fixtures/call-graphs/maven.json');
const callGraph = createCallGraph(callGraphPayload);
const plugin = {
async inspect() {
return {
package: {},
plugin: { name: 'testplugin', runtime: 'testruntime' },
callGraph,
};
},
};
const spyPlugin = sinon.spy(plugin, 'inspect');
const loadPlugin = sinon.stub(params.plugins, 'loadPlugin');
t.teardown(loadPlugin.restore);
loadPlugin.withArgs('gradle').returns(plugin);
await params.cli.test('gradle-app', {
reachableVulns: true,
});
const req = params.server.popRequest();
t.equal(req.method, 'POST', 'makes POST request');
t.equal(
req.headers['x-snyk-cli-version'],
params.versionNumber,
'sends version number',
);
t.match(req.url, '/test-dep-graph', 'posts to correct url');
t.match(req.body.targetFile, undefined, 'target is undefined');
t.equal(req.body.depGraph.pkgManager.name, 'gradle');
t.deepEqual(
req.body.callGraph,
callGraphPayload,
'correct call graph sent',
);
t.same(
spyPlugin.getCall(0).args,
[
'gradle-app',
'build.gradle',
{
args: null,
file: 'build.gradle',
org: null,
projectName: null,
packageManager: 'gradle',
path: 'gradle-app',
showVulnPaths: 'some',
reachableVulns: true,
},
],
'calls gradle plugin',
);
},
'`test gradle-app --reachable-vulns and --init-script` sends call graph': (
params,
utils,
) => async (t) => {
utils.chdirWorkspaces();
const callGraphPayload = readJSON('../fixtures/call-graphs/maven.json');
const callGraph = createCallGraph(callGraphPayload);
const plugin = {
async inspect() {
return {
package: {},
plugin: { name: 'testplugin', runtime: 'testruntime' },
callGraph,
};
},
};
const spyPlugin = sinon.spy(plugin, 'inspect');
const loadPlugin = sinon.stub(params.plugins, 'loadPlugin');
t.teardown(loadPlugin.restore);
loadPlugin.withArgs('gradle').returns(plugin);
await params.cli.test('gradle-app', {
reachableVulns: true,
initScript: 'somescript.gradle',
});
const req = params.server.popRequest();
t.equal(req.method, 'POST', 'makes POST request');
t.equal(
req.headers['x-snyk-cli-version'],
params.versionNumber,
'sends version number',
);
t.match(req.url, '/test-dep-graph', 'posts to correct url');
t.match(req.body.targetFile, undefined, 'target is undefined');
t.equal(req.body.depGraph.pkgManager.name, 'gradle');
t.deepEqual(
req.body.callGraph,
callGraphPayload,
'correct call graph sent',
);
t.same(
spyPlugin.getCall(0).args,
[
'gradle-app',
'build.gradle',
{
args: null,
file: 'build.gradle',
org: null,
projectName: null,
packageManager: 'gradle',
|
path: 'gradle-app',
showVulnPaths: 'some',
reachableVulns: true,
initScript: 'somescript.gradle',
},
],
'calls gradle plugin',
);
},
'`test gradle-app --all-sub-projects` sends `allSubProjects` argument to plugin': (
params,
utils,
) => async (t) => {
utils.chdirWorkspaces();
const plugin = {
async inspect() {
return { plugin: { name: 'gradle' }, package: {} };
},
};
const spyPlugin = sinon.spy(plugin, 'inspect');
const loadPlugin = sinon.stub(params.plugins, 'loadPlugin');
t.teardown(loadPlugin.restore);
loadPlugin.withArgs('gradle').returns(plugin);
await params.cli.test('gradle-app', {
allSubProjects: true,
});
t.true(((spyPlugin.args[0] as any)[2] as any).allSubProjects);
},
'`test gradle-app --all-sub-projects` with policy': (
params,
utils,
) => async (t) => {
utils.chdirWorkspaces();
const plugin = {
async inspect() {
return { plugin: { name: 'gradle' }, package: {} };
},
};
const spyPlugin = sinon.spy(plugin, 'inspect');
const loadPlugin = sinon.stub(params.plugins, 'loadPlugin');
t.teardown(loadPlugin.restore);
loadPlugin.withArgs('gradle').returns(plugin);
await params.cli.test('gradle-app', {
allSubProjects: true,
});
t.true(((spyPlugin.args[0] as any)[2] as any).allSubProjects);
let policyCount = 0;
params.server
.getRequests()
.filter((r) => r.url === '/api/v1/test-dep-graph?org=')
.forEach((req) => {
if (
req.body.displayTargetFile.endsWith('gradle-multi-project/subproj')
) {
// TODO: this should return 1 policy when fixed
// uncomment then
// t.match(
// req.body.policy,
// 'SNYK-JAVA-ORGBOUNCYCASTLE-32364',
// 'policy is found & sent',
// );
t.ok(
req.body.policy,
undefined,
'policy is not found even though it should be',
);
policyCount += 1;
}
t.match(req.url, '/test-dep-graph', 'posts to correct url');
});
// TODO: this should return 1 policy when fixed
      t.equal(policyCount, 0, 'no sub-project policy sent yet (TODO: should be 1 when fixed)');
},
'`test gradle-app` plugin fails to return package or scannedProjects': (
params,
utils,
) => async (t) => {
utils.chdirWorkspaces();
const plugin = {
async inspect() {
return { plugin: { name: 'gradle' } };
},
};
sinon.spy(plugin, 'inspect');
const loadPlugin = sinon.stub(params.plugins, 'loadPlugin');
t.teardown(loadPlugin.restore);
loadPlugin.withArgs('gradle').returns(plugin);
try {
await params.cli.test('gradle-app', {});
t.fail('expected error');
} catch (error) {
t.match(
error,
/error getting dependencies from gradle plugin: neither 'package' nor 'scannedProjects' were found/,
'error found',
);
}
},
'`test gradle-app --all-sub-projects` returns correct multi tree meta': (
params,
utils,
) => async (t) => {
utils.chdirWorkspaces();
const plugin = {
async inspect(): Promise<pluginApi.MultiProjectResult> {
return {
plugin: {
meta: {
allSubProjectNames: ['a', 'b'],
},
name: 'gradle',
},
scannedProjects: [
{
depTree: {
name: 'tree0',
version: '1.0.0',
dependencies: { dep1: { name: 'dep1', version: '1' } },
},
},
{
depTree: {
name: 'tree1',
version: '2.0.0',
dependencies: { dep1: { name: 'dep2', version: '2' } },
},
},
],
};
},
};
const spyPlugin = sinon.spy(plugin, 'inspect');
const loadPlugin = sinon.stub(params.plugins, 'loadPlugin');
t.teardown(loadPlugin.restore);
loadPlugin.withArgs('gradle').returns(plugin);
const commandResult: CommandResult = await params.cli.test('gradle-app', {
allSubProjects: true,
});
const res = commandResult.getDisplayResults();
t.true(
((spyPlugin.args[0] as any)[2] as any).allSubProjects,
'`allSubProjects` option is sent',
);
const tests = res
.split('Testing gradle-app...')
.filter((s) => !!s.trim());
t.equals(tests.length, 2, 'two projects tested independently');
t.match(
res,
/Tested 2 projects/,
'number projects tested displayed properly',
);
t.notMatch(
res,
/use --all-sub-projects flag to scan all sub-projects/,
'all-sub-projects flag is NOT suggested as we already scanned with it',
);
for (let i = 0; i < tests.length; i++) {
const meta = tests[i]
.slice(tests[i].indexOf('Organization:'))
.split('\n');
t.match(meta[0], /Organization:\s+test-org/, 'organization displayed');
t.match(
meta[1],
/Package manager:\s+gradle/,
'package manager displayed',
);
t.match(
meta[2],
/Target file:\s+build.gradle/,
'target file displayed',
);
t.match(meta[3], /Project name:\s+tree/, 'sub-project displayed');
t.includes(meta[3], `tree${i}`, 'sub-project displayed');
t.match(meta[4], /Open source:\s+no/, 'open source displayed');
t.match(meta[5], /Project path:\s+gradle-app/, 'path displayed');
t.notMatch(
meta[6],
/Local Snyk policy:\s+found/,
'local policy not displayed',
);
}
},
},
};
| |
test_git_head_info.py
|
#!/usr/bin/env python
#-*- coding:utf-8; mode:python; indent-tabs-mode: nil; c-basic-offset: 2; tab-width: 2 -*-
from bes.testing.unit_test import unit_test
from bes.git.git_head_info import git_head_info
class test_git_head_info(unit_test):
def test_parse_head_info(self):
f = git_head_info.parse_head_info
self.assertEqual( ( 'branch', 'release-beta-14-studio-fixes', None, '9038154f', 'track branch release-beta-14-studio-fixes [skip ci]', None ),
f(None, '* release-beta-14-studio-fixes 9038154f track branch release-beta-14-studio-fixes [skip ci]') )
self.assertEqual( ( 'branch', 'b1', None, 'b59bc43', 'message 1', None ),
f(None, '* b1 b59bc43 message 1') )
def test_parse_head_info_master(self):
f = git_head_info.parse_head_info
self.assertEqual( ( 'branch', 'master', None, 'deadbeef', 'fix foo.', None ),
f(None, '* master deadbeef fix foo.') )
def test_match_ref_branches(self):
h = git_head_info('tag', None, 'builds/foo/1.2.3', 'deadbeef', 'foo', [ 'master', 'release-beta-26', 'release-beta-27' ])
self.assertEqual( [ 'release-beta-26', 'release-beta-27' ], h.match_ref_branches([ 'release-beta-*' ]) )
def test_parse_head_info_detached_tag(self):
output = '''\
|
* (HEAD detached at 1.2.3) deadbeef fixed stuff
foo
master
zoo
'''
self.assertEqual( ( 'tag', None, '1.2.3', 'deadbeef', 'fixed stuff', None ),
git_head_info.parse_head_info(None, output) )
def test_parse_head_info_detached_commit(self):
output = '''\
* (HEAD detached at deadbeef) deadbeef fixed stuff
foo
master
zoo
'''
self.assertEqual( ( 'detached_commit', None, None, 'deadbeef', 'fixed stuff', None ),
git_head_info.parse_head_info(None, output) )
if __name__ == '__main__':
unit_test.main()
| |
index.tsx
|
import { faEye, faEyeSlash } from '@fortawesome/free-regular-svg-icons';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import clsx from 'clsx';
import React, { ChangeEvent, ReactElement, useState } from 'react';
import css from '../../index.module.css';
interface IProps {
value?: string;
onChange?: (event: ChangeEvent<HTMLInputElement>) => void;
name?: string;
className?: string;
|
const Input = ({ value, onChange, className, name, placeholder, error }: IProps): ReactElement => {
const [type, setType] = useState<'text' | 'password'>('password');
const handleType = (): void => {
setType(value => (value === 'text' ? 'password' : 'text'));
};
return (
<>
<div className={css.wrp}>
<button className={css.btn} onClick={handleType} type="button">
<FontAwesomeIcon icon={type === 'text' ? faEye : faEyeSlash} />
</button>
<input
id={name}
name={name}
value={value}
onChange={onChange}
className={clsx(css.input, className, error && css.inputError)}
placeholder={placeholder}
type={type}
/>
</div>
{error && <span className={css.passError}>{error}</span>}
</>
);
};
export default Input;
|
placeholder?: string;
error?: string;
}
|
axis.rs
|
cpp! {{
#include <CNTKLibrary.h>
#include <cstdio>
#include <vector>
using namespace CNTK;
using namespace std;
}}
pub(super) type AxisInner = [u64; 6usize];
pub struct Axis {
pub(super) payload: AxisInner
}
impl Axis {
pub fn all() -> Axis {
Axis {
payload: unsafe {
cpp!([] -> AxisInner as "Axis" {
return Axis::AllAxes();
})
}
}
}
pub fn default_batch_axis() -> Axis {
Axis {
payload: unsafe {
cpp!([] -> AxisInner as "Axis" {
return Axis::DefaultBatchAxis();
})
}
}
}
pub fn named_dynamic(name: &str) -> Axis {
let name_ptr = name.as_ptr();
let name_len = name.len();
Axis {
payload: unsafe {
cpp!([name_ptr as "char*", name_len as "size_t"] -> AxisInner as "Axis" {
string name(name_ptr, name_ptr + name_len);
wstring wname;
wname.assign(name.begin(), name.end());
return Axis(wname);
})
}
}
}
pub fn all_static() -> Axis {
Axis {
payload: unsafe {
cpp!([] -> AxisInner as "Axis" {
return Axis::AllStaticAxes();
})
}
}
|
pub fn new(number: i32) -> Axis {
Axis {
payload: unsafe {
cpp!([number as "int"] -> AxisInner as "Axis" {
return Axis(number);
})
}
}
}
}
impl Drop for Axis {
fn drop(&mut self) {
let payload = self.payload;
unsafe {
cpp!([payload as "Axis"] {
payload.~Axis();
})
};
}
}
impl Clone for Axis {
fn clone(&self) -> Self {
let xpayload = self.payload;
let payload = unsafe {
cpp!([xpayload as "Axis"] -> AxisInner as "Axis" {
return xpayload;
})
};
Axis {payload}
}
}
|
}
|
day2.rs
|
mod prelude;
use prelude::*;
fn main() {
let course = std::io::stdin()
.lock()
.lines()
.map(|line| line.expect("invalid line"))
.collect();
|
println!("{}", simulate_course(&course, basic));
println!("{}", simulate_course(&course, aim));
}
struct Simulation {
x: i32,
depth: i32,
aim: i32
}
type Simulator = fn(&mut Simulation, direction: &str, value: i32);
fn basic(sim: &mut Simulation, direction: &str, value: i32) {
match direction {
"forward" => { sim.x += value; },
"up" => { sim.depth -= value; },
"down" => { sim.depth += value; },
_ => {}
}
}
fn aim(sim: &mut Simulation, direction: &str, value: i32) {
match direction {
"forward" => { sim.x += value; sim.depth += sim.aim * value; },
"up" => { sim.aim -= value; },
"down" => { sim.aim += value; },
_ => {}
}
}
fn simulate_course<T: AsRef<str>>(v: &Vec<T>, simulate: Simulator) -> i32 {
let mut state = Simulation {
x: 0,
depth: 0,
aim: 0
};
for v in v.iter() {
let mut parts = v.as_ref().split(" ");
let direction = parts.next().expect("missing direction");
let value = parts.next().map(|p| p.parse::<i32>().expect("invalid value")).expect("missing value");
simulate(&mut state, direction, value);
}
state.x * state.depth
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn example_1() {
let v = vec![
"forward 5",
"down 5",
"forward 8",
"up 3",
"down 8",
"forward 2"
];
assert_eq!(150, simulate_course(&v, basic));
}
#[test]
fn example_2() {
let v = vec![
"forward 5",
"down 5",
"forward 8",
"up 3",
"down 8",
"forward 2"
];
assert_eq!(900, simulate_course(&v, aim));
}
}
| |
gatsby-config.js
|
module.exports = {
siteMetadata: {
title: `Enso Digital`,
description: `We are a proud Canadian digital agency. Our mission is to create beautifully designed digital products that generate striking brands.`,
author: `@ensodigital_ca`,
},
plugins: [
`gatsby-plugin-react-helmet`,
{
resolve: `gatsby-source-filesystem`,
options: {
name: `images`,
path: `${__dirname}/src/images`,
},
},
`gatsby-transformer-sharp`,
`gatsby-plugin-sharp`,
`gatsby-plugin-less`,
{
resolve: `gatsby-plugin-manifest`,
options: {
name: `gatsby-starter-default`,
short_name: `starter`,
start_url: `/`,
background_color: `#ffffff`,
theme_color: `#000000`,
display: `minimal-ui`,
icon: `src/images/favicon.png`, // This path is relative to the root of the site.
},
},
|
// 'gatsby-plugin-offline',
],
}
|
// this (optional) plugin enables Progressive Web App + Offline functionality
// To learn more, visit: https://gatsby.app/offline
|
webaudio.rs
|
#![cfg(target_arch = "wasm32")]
use hashbrown::HashMap;
use notan_audio::AudioBackend;
use oddio::Frames;
use std::sync::Arc;
use crate::decoder::frames_from_bytes;
const WARN_TEXT: &str =
"AudioContext cannot be initiated until the user interacts with the webpage.";
/// Dummy audio backend used until the user interacts with the browser
/// This is due to browser security policies, which don't allow video or sound
/// playback until the user interacts directly with the page
pub(crate) struct DummyAudioBackend {
pub id_count: u64,
pub volume: f32,
pub sources: HashMap<u64, Arc<Frames<[f32; 2]>>>,
}
impl DummyAudioBackend {
pub fn new() -> Self {
// Only on debug mode display a warning that the audio context needs an user's interaction to work
#[cfg(debug_assertions)]
{
            log::warn!("DEBUG LOG: AudioContext cannot be enabled until the user interacts with the app.");
}
Self {
id_count: 0,
volume: 1.0,
sources: Default::default(),
}
}
}
impl AudioBackend for DummyAudioBackend {
fn set_global_volume(&mut self, volume: f32) {
log::error!("{}", WARN_TEXT);
self.volume = volume;
}
fn global_volume(&self) -> f32 {
self.volume
}
fn create_source(&mut self, bytes: &[u8]) -> Result<u64, String> {
let frames = frames_from_bytes(bytes)?;
let id = self.id_count;
self.sources.insert(id, frames);
self.id_count += 1;
Ok(id)
}
#[allow(unreachable_code)]
fn play_sound(&mut self, _source: u64, _volume: f32, _repeat: bool) -> Result<u64, String> {
log::error!("{}", WARN_TEXT);
#[cfg(debug_assertions)]
{
panic!("AudioContext needs an user's interaction to work.");
}
let id = self.id_count;
self.id_count += 1;
Ok(id)
}
fn pause(&mut self, _sound: u64) {
log::error!("{}", WARN_TEXT);
}
fn resume(&mut self, _sound: u64) {
log::error!("{}", WARN_TEXT);
}
fn stop(&mut self, _sound: u64) {
log::error!("{}", WARN_TEXT);
}
fn is_stopped(&mut self, _sound: u64) -> bool {
false
}
fn is_paused(&mut self, _sound: u64) -> bool
|
fn set_volume(&mut self, _sound: u64, _volume: f32) {
log::error!("{}", WARN_TEXT);
}
fn volume(&self, _sound: u64) -> f32 {
0.0
}
fn clean(&mut self, _sources: &[u64], _sounds: &[u64]) {
log::error!("{}", WARN_TEXT);
}
}
|
{
false
}
|
mod.rs
|
pub mod interact;
mod lines;
pub mod parse;
pub mod tty;
|
pub use self::parse::*;
| |
QueueStatusCommand.ts
|
import Worker from "../../../src/queue/Worker";
import Command from "../../../base/Command";
/**
* Get status of jobs in queue and failed jobs on given queues by name
*
* @class
*/
export default class QueueStatusCommand extends Command {
/**
* Integrated command flag
*
* @type {boolean}
*/
static integrated: boolean = true;
/**
* Static signature key that will be callable name of our command.
*
* @type {string}
*/
static signature: string = "queue:status";
/**
* User friendly description of the command that has to be static.
*
* @type {string}
*/
static description: string = "Get status of the queue instance";
/**
* Example of the command usage.
*
* @type {string}
*/
static usage: string = "command=queue:status name=example-queue";
/**
* Handler method of the command that will run the action.
*
* @return {Promise<{
* queue: string,
|
* }[]>}
*/
async handle(): Promise<{
queue: string,
pending: number,
failed: number,
}[]> {
if (typeof this.payload.name !== "string") {
throw new Error("QueueStatusCommand: please pass one or more queue names separated by comma");
}
const queues = this.payload.name.split(",");
const response = [];
for (const name of queues) {
response.push({
queue: name,
        pending: await Worker.redis().llen(
          Worker.queue(name),
        ),
        failed: await Worker.redis().llen(
          Worker.queue(`failed:${name}`),
        ),
});
}
if (this.cli === true) {
console.table(response.map((item) => ({
Queue: item.queue,
"Jobs in queue": item.pending,
"Jobs in failed list": item.failed,
})));
}
return response;
}
}
|
* pending: number,
* failed: number,
|
wrong_return_type.rs
|
use axum_macros::debug_handler;
#[debug_handler]
async fn handler() -> bool {
false
}
fn
|
() {}
|
main
|
product-intro.component.ts
|
import {
AfterContentChecked,
ChangeDetectionStrategy,
Component,
} from '@angular/core';
import { Product, TranslationService, WindowRef } from '@spartacus/core';
import { BehaviorSubject, Observable } from 'rxjs';
import { CurrentProductService } from '../current-product.service';
|
templateUrl: './product-intro.component.html',
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class ProductIntroComponent implements AfterContentChecked {
reviewsTabAvailable = new BehaviorSubject<boolean>(false);
product$: Observable<Product> = this.currentProductService.getProduct();
constructor(
protected currentProductService: CurrentProductService,
private translationService: TranslationService,
protected winRef: WindowRef
) {}
ngAfterContentChecked() {
this.reviewsTabAvailable.next(!!this.getReviewsComponent());
}
// Scroll to views component on page and click "Reviews" tab
showReviews() {
// Use translated label for Reviews tab reference
this.translationService
.translate('CMSTabParagraphContainer.tabs.ProductReviewsTabComponent')
.subscribe(reviewsTabLabel => {
const tabsComponent = this.getTabsComponent();
const reviewsTab = this.getTabByLabel(reviewsTabLabel, tabsComponent);
const reviewsComponent = this.getReviewsComponent();
if (reviewsTab && reviewsComponent) {
this.clickTabIfInactive(reviewsTab);
setTimeout(
() => reviewsComponent.scrollIntoView({ behavior: 'smooth' }),
0
);
}
})
.unsubscribe();
}
// NOTE: Does not currently exists as its own component
// but part of tabs component. This is likely to change in refactor.
private getReviewsComponent(): Element {
return this.winRef.document.querySelector('cx-product-reviews');
}
// Get Tabs Component if exists on page
private getTabsComponent(): Element {
return this.winRef.document.querySelector('cx-tab-paragraph-container');
}
// Click to activate tab if not already active
private clickTabIfInactive(tab: HTMLElement): void {
if (
!tab.classList.contains('active') ||
tab.classList.contains('toggled')
) {
tab.click();
}
}
// Get Tab by label if exists on page
private getTabByLabel(label: string, tabsComponent: Element): HTMLElement {
if (tabsComponent) {
// NOTE: Reads through h3 tags to click on correct tab
// There may be a better way of doing this now/after refactor
const h3Elements: HTMLCollectionOf<
HTMLElement
> = tabsComponent.getElementsByTagName('h3');
// Look through h3 tab elements until finding tab with label
for (const h3Element of Array.from(h3Elements)) {
if (h3Element.innerHTML.includes(label)) {
return h3Element;
}
}
}
}
}
|
@Component({
selector: 'cx-product-intro',
|
sm_request_type.go
|
package models
type RequestTypePageScheme struct {
Size int `json:"size,omitempty"`
Start int `json:"start,omitempty"`
Limit int `json:"limit,omitempty"`
IsLastPage bool `json:"isLastPage,omitempty"`
Values []*RequestTypeScheme `json:"values,omitempty"`
Expands []string `json:"_expands,omitempty"`
Links *RequestTypePageLinkScheme `json:"_links,omitempty"`
}
type RequestTypePageLinkScheme struct {
Self string `json:"self,omitempty"`
Base string `json:"base,omitempty"`
Context string `json:"context,omitempty"`
Next string `json:"next,omitempty"`
Prev string `json:"prev,omitempty"`
}
type ProjectRequestTypePageScheme struct {
Expands []string `json:"_expands,omitempty"`
Size int `json:"size,omitempty"`
Start int `json:"start,omitempty"`
Limit int `json:"limit,omitempty"`
IsLastPage bool `json:"isLastPage,omitempty"`
Values []*RequestTypeScheme `json:"values,omitempty"`
Links *ProjectRequestTypePageLinkScheme `json:"_links,omitempty"`
}
|
Prev string `json:"prev,omitempty"`
}
type RequestTypeScheme struct {
ID string `json:"id,omitempty"`
Name string `json:"name,omitempty"`
Description string `json:"description,omitempty"`
HelpText string `json:"helpText,omitempty"`
IssueTypeID string `json:"issueTypeId,omitempty"`
ServiceDeskID string `json:"serviceDeskId,omitempty"`
GroupIds []string `json:"groupIds,omitempty"`
Expands []string `json:"_expands,omitempty"`
}
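// Illustrative JSON shape that unmarshals into RequestTypeScheme, based purely on
// the struct tags above (values are made up):
//   {
//     "id": "25",
//     "name": "Get IT help",
//     "description": "Request help from the IT team",
//     "issueTypeId": "10002",
//     "serviceDeskId": "10",
//     "groupIds": ["3"],
//     "_expands": ["field"]
//   }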
type RequestTypeFieldsScheme struct {
RequestTypeFields []*RequestTypeFieldScheme `json:"requestTypeFields,omitempty"`
CanRaiseOnBehalfOf bool `json:"canRaiseOnBehalfOf,omitempty"`
CanAddRequestParticipants bool `json:"canAddRequestParticipants,omitempty"`
}
type RequestTypeFieldScheme struct {
FieldID string `json:"fieldId,omitempty"`
Name string `json:"name,omitempty"`
Description string `json:"description,omitempty"`
Required bool `json:"required,omitempty"`
DefaultValues []*RequestTypeFieldValueScheme `json:"defaultValues,omitempty"`
ValidValues []*RequestTypeFieldValueScheme `json:"validValues,omitempty"`
JiraSchema *RequestTypeJiraSchema `json:"jiraSchema,omitempty"`
Visible bool `json:"visible,omitempty"`
}
type RequestTypeFieldValueScheme struct {
Value string `json:"value,omitempty"`
Label string `json:"label,omitempty"`
Children []interface{} `json:"children,omitempty"`
}
type RequestTypeJiraSchema struct {
Type string `json:"type,omitempty"`
Items string `json:"items,omitempty"`
System string `json:"system,omitempty"`
Custom string `json:"custom,omitempty"`
CustomID int `json:"customId,omitempty"`
}
|
type ProjectRequestTypePageLinkScheme struct {
Base string `json:"base,omitempty"`
Context string `json:"context,omitempty"`
Next string `json:"next,omitempty"`
|
example_gui_extension.py
|
try:
from PyQt5.QtWidgets import QToolBar, QLabel, QPushButton, QTextEdit, QWidget, QInputDialog
from PyQt5 import QtCore
except ImportError:
from PyQt4.QtGui import QToolBar, QLabel, QPushButton, QTextEdit, QWidget, QInputDialog
from PyQt4 import QtCore
from vqt.main import idlethread
from vqt.basics import VBox
from vqt.common import ACT
'''
This is an example of a vivisect GUI extension module.
Set the environment variable VIV_EXT_PATH to point at a
directory full of python modules such as this to extend
and implement your own vivisect features.
The extension should be a python module, either in the
form of a .py file or a directory with a __init__.py
file. Either way, the module will be loaded into
memory and the "vivExtension" function called.
'''
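# A minimal sketch of wiring up an extension directory (paths and the launcher
# command are illustrative, not prescribed by vivisect):
#
#   $ mkdir -p ~/viv_extensions
#   $ cp example_gui_extension.py ~/viv_extensions/
#   $ export VIV_EXT_PATH=~/viv_extensions
#   $ vivbin somebinary
#
# Each module found in VIV_EXT_PATH is loaded and its vivExtension(vw, vwgui)
# function is called, as described in the docstring above.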
class ExampleToolbar(QToolBar):
def __init__(self, vw, vwgui):
self.vw = vw
self.vwgui = vwgui
QToolBar.__init__(self, parent=vwgui)
# Add a label to the toolbar
self.addWidget( QLabel('Example Toolbar:', parent=self) )
# Add an action button to the toolbar
self.addAction('ONE', self.doOne)
def doOne(self):
self.vw.vprint('did one!')
class ExampleWindow(QWidget):
def __init__(self, vw, vwgui):
self.vw = vw
self.vwgui = vwgui
QWidget.__init__(self, parent=vwgui)
# Set the window title
self.setWindowTitle('Example Window!')
# Add a Button and a Text Edit object in a basic VBox layout
button = QPushButton('My Button!', parent=self)
textedit = QTextEdit('WOOT! Some text!', parent=self)
self.setLayout( VBox(button, textedit) )
def vprint(vw, s, *args, **kwargs):
vw.vprint(s % args)
print(s % args)
def ctxMenuHook(vw, va, expr, menu, parent, nav):
'''
Context Menu handler (adds options as we wish)
'''
try:
if va == 0x41414141:
menu.addAction('WAT?', ACT(vw.vprint, "We're at AAAA!"))
menu.addAction('bookmark (B)', ACT(vw.getVivGui().addBookmark, va))
menu.addAction('YEEE HAH', ACT(vw.vprint, "YEE HAH %x %r %r %r %r" % (va, expr, menu, parent, nav)))
menu.addAction('YEEE HAH1', ACT(vprint, vw, "YEE HAH %x %r %r %r %r", va, expr, menu, parent, nav))
except Exception as e:
import traceback
traceback.print_exc()
class Crap:
'''
This is a helpful class for storing vw and vwgui and "doing the thing"
Currently Vivisect's Hot Keys are tied to the many gui widgets, so
vw and vwgui are not available when the "thing" is called.
|
def thing(self):
vprint(self.vw, "Blah Blah Blah")
def printUserInput(self):
# ok is whether the "OK" button was pressed, utext is the user text
utext, ok = QInputDialog.getText(self.vwgui, 'Enter...', 'User Text')
vprint(self.vw, '%r: %r', ok, utext)
@idlethread
def vivExtension(vw, vwgui):
# Create a toolbar and add it to the GUI
toolbar = ExampleToolbar(vw, vwgui)
vwgui.addToolBar(QtCore.Qt.TopToolBarArea, toolbar)
# Create a new Vivisect Dock Window (based on a QWidget)
window = ExampleWindow(vw, vwgui)
d = vwgui.vqDockWidget(window, floating=True)
d.resize(300,200)
# Add a menu item
vwgui.vqAddMenuField('&Example.&FooBar.&PrintDiscoveredStats', vw.printDiscoveredStats, ())
# hook context menu
vw.addCtxMenuHook('example', ctxMenuHook)
# add HotKeyTargets and HotKeys
tempmod = Crap(vw, vwgui)
vwgui.addHotKey('ctrl+p', 'file:hackme')
vwgui.addHotKeyTarget('file:hackme', tempmod.thing)
# Popups/Dialogs - add a menu entry to ask for input and print the output
vwgui.vqAddMenuField("&Example.&FooBar.&PrintUserInput", tempmod.printUserInput, ())
# get Dock Windows by name
for w, vqDW in vwgui.vqGetDockWidgetsByName('viv'):
vprint(vw, "Window: %r DockWidget: %r (%r)", w, vqDW, w.getEnviNavName())
|
'''
def __init__(self, vw, vwgui):
self.vw = vw
self.vwgui = vwgui
|
serializer.js
|
// Copyright 2007 The Closure Library Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Protocol buffer serializer.
* @author [email protected] (Erik Arvidsson)
*/
// TODO(arv): Serialize booleans as 0 and 1
goog.provide('goog.proto.Serializer');
goog.require('goog.json.Serializer');
goog.require('goog.string');
/**
* Object that can serialize objects or values to a protocol buffer string.
* @constructor
* @extends {goog.json.Serializer}
* @final
*/
goog.proto.Serializer = function() {
goog.json.Serializer.call(this);
};
goog.inherits(goog.proto.Serializer, goog.json.Serializer);
/**
* Serializes an array to a protocol buffer string. This overrides the JSON
* method to output empty slots when the value is null or undefined.
* @param {Array} arr The array to serialize.
* @param {Array} sb Array used as a string builder.
* @override
*/
goog.proto.Serializer.prototype.serializeArray = function(arr, sb) {
var l = arr.length;
sb.push('[');
var emptySlots = 0;
var sep = '';
for (var i = 0; i < l; i++) {
if (arr[i] == null) { // catches undefined as well
emptySlots++;
} else {
if (emptySlots > 0) {
sb.push(goog.string.repeat(',', emptySlots));
emptySlots = 0;
}
sb.push(sep);
this.serializeInternal(arr[i], sb);
sep = ',';
|
}
}
sb.push(']');
};
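// Usage sketch (assuming the standard goog.json.Serializer.serialize() API that
// this class inherits): null/undefined entries become empty slots.
//
//   var s = new goog.proto.Serializer();
//   s.serialize([1, null, 3]);    // -> '[1,,3]'
//   s.serialize([1, null, null]); // -> '[1]' (trailing empty slots are dropped)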
|
|
million_points.py
|
"""Test converting an image to a pyramid.
"""
import numpy as np
import napari
|
with napari.gui_qt():
viewer = napari.view_points(points, face_color='red')
|
points = np.random.randint(100, size=(50_000, 2))
|
model.go
|
package model
// Copyright (c) 2018 Bhojpur Consulting Private Limited, India. All rights reserved.
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
import (
"container/list"
"errors"
"fmt"
"regexp"
"sort"
"strconv"
"strings"
"github.com/bhojpur/policy/pkg/config"
"github.com/bhojpur/policy/pkg/log"
"github.com/bhojpur/policy/pkg/util"
)
// Model represents the whole access control model.
type Model map[string]AssertionMap
// AssertionMap is the collection of assertions, can be "r", "p", "g", "e", "m".
type AssertionMap map[string]*Assertion
const defaultDomain string = ""
const defaultSeparator = "::"
var sectionNameMap = map[string]string{
"r": "request_definition",
"p": "policy_definition",
"g": "role_definition",
"e": "policy_effect",
"m": "matchers",
}
// Minimal required sections for a model to be valid
var requiredSections = []string{"r", "p", "e", "m"}
func loadAssertion(model Model, cfg config.ConfigInterface, sec string, key string) bool {
value := cfg.String(sectionNameMap[sec] + "::" + key)
return model.AddDef(sec, key, value)
}
// AddDef adds an assertion to the model.
func (model Model) AddDef(sec string, key string, value string) bool {
if value == ""
|
ast := Assertion{}
ast.Key = key
ast.Value = value
ast.PolicyMap = make(map[string]int)
ast.setLogger(model.GetLogger())
ast.initPriorityIndex()
if sec == "r" || sec == "p" {
ast.Tokens = strings.Split(ast.Value, ",")
for i := range ast.Tokens {
ast.Tokens[i] = key + "_" + strings.TrimSpace(ast.Tokens[i])
}
} else {
ast.Value = util.RemoveComments(util.EscapeAssertion(ast.Value))
}
if sec == "m" && strings.Contains(ast.Value, "in") {
ast.Value = strings.Replace(strings.Replace(ast.Value, "[", "(", -1), "]", ")", -1)
}
_, ok := model[sec]
if !ok {
model[sec] = make(AssertionMap)
}
model[sec][key] = &ast
return true
}
func getKeySuffix(i int) string {
if i == 1 {
return ""
}
return strconv.Itoa(i)
}
func loadSection(model Model, cfg config.ConfigInterface, sec string) {
i := 1
for {
if !loadAssertion(model, cfg, sec, sec+getKeySuffix(i)) {
break
} else {
i++
}
}
}
// SetLogger sets the model's logger.
func (model Model) SetLogger(logger log.Logger) {
for _, astMap := range model {
for _, ast := range astMap {
ast.logger = logger
}
}
model["logger"] = AssertionMap{"logger": &Assertion{logger: logger}}
}
// GetLogger returns the model's logger.
func (model Model) GetLogger() log.Logger {
return model["logger"]["logger"].logger
}
// NewModel creates an empty model.
func NewModel() Model {
m := make(Model)
m.SetLogger(&log.DefaultLogger{})
return m
}
// NewModelFromFile creates a model from a .CONF file.
func NewModelFromFile(path string) (Model, error) {
m := NewModel()
err := m.LoadModel(path)
if err != nil {
return nil, err
}
return m, nil
}
// NewModelFromString creates a model from a string which contains model text.
func NewModelFromString(text string) (Model, error) {
m := NewModel()
err := m.LoadModelFromText(text)
if err != nil {
return nil, err
}
return m, nil
}
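// Illustrative model text accepted by NewModelFromString (a basic ACL model;
// section headers follow sectionNameMap above):
//
//   [request_definition]
//   r = sub, obj, act
//
//   [policy_definition]
//   p = sub, obj, act
//
//   [policy_effect]
//   e = some(where (p.eft == allow))
//
//   [matchers]
//   m = r.sub == p.sub && r.obj == p.obj && r.act == p.act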
// LoadModel loads the model from model CONF file.
func (model Model) LoadModel(path string) error {
cfg, err := config.NewConfig(path)
if err != nil {
return err
}
return model.loadModelFromConfig(cfg)
}
// LoadModelFromText loads the model from the text.
func (model Model) LoadModelFromText(text string) error {
cfg, err := config.NewConfigFromText(text)
if err != nil {
return err
}
return model.loadModelFromConfig(cfg)
}
func (model Model) loadModelFromConfig(cfg config.ConfigInterface) error {
for s := range sectionNameMap {
loadSection(model, cfg, s)
}
ms := make([]string, 0)
for _, rs := range requiredSections {
if !model.hasSection(rs) {
ms = append(ms, sectionNameMap[rs])
}
}
if len(ms) > 0 {
return fmt.Errorf("missing required sections: %s", strings.Join(ms, ","))
}
return nil
}
func (model Model) hasSection(sec string) bool {
section := model[sec]
return section != nil
}
// PrintModel prints the model to the log.
func (model Model) PrintModel() {
if !model.GetLogger().IsEnabled() {
return
}
var modelInfo [][]string
for k, v := range model {
if k == "logger" {
continue
}
for i, j := range v {
modelInfo = append(modelInfo, []string{k, i, j.Value})
}
}
model.GetLogger().LogModel(modelInfo)
}
func (model Model) SortPoliciesBySubjectHierarchy() error {
if model["e"]["e"].Value != "subjectPriority(p_eft) || deny" {
return nil
}
subIndex := 0
domainIndex := -1
for ptype, assertion := range model["p"] {
for index, token := range assertion.Tokens {
if token == fmt.Sprintf("%s_dom", ptype) {
domainIndex = index
break
}
}
policies := assertion.Policy
subjectHierarchyMap, err := getSubjectHierarchyMap(model["g"]["g"].Policy)
if err != nil {
return err
}
sort.SliceStable(policies, func(i, j int) bool {
domain1, domain2 := defaultDomain, defaultDomain
if domainIndex != -1 {
domain1 = policies[i][domainIndex]
domain2 = policies[j][domainIndex]
}
name1, name2 := getNameWithDomain(domain1, policies[i][subIndex]), getNameWithDomain(domain2, policies[j][subIndex])
p1 := subjectHierarchyMap[name1]
p2 := subjectHierarchyMap[name2]
return p1 > p2
})
for i, policy := range assertion.Policy {
assertion.PolicyMap[strings.Join(policy, ",")] = i
}
}
return nil
}
func getSubjectHierarchyMap(policies [][]string) (map[string]int, error) {
subjectHierarchyMap := make(map[string]int)
// Tree structure of role
policyMap := make(map[string][]string)
for _, policy := range policies {
if len(policy) < 2 {
return nil, errors.New("policy g expects at least 2 parameters")
}
domain := defaultDomain
if len(policy) != 2 {
domain = policy[2]
}
child := getNameWithDomain(domain, policy[0])
parent := getNameWithDomain(domain, policy[1])
policyMap[parent] = append(policyMap[parent], child)
if _, ok := subjectHierarchyMap[child]; !ok {
subjectHierarchyMap[child] = 0
}
if _, ok := subjectHierarchyMap[parent]; !ok {
subjectHierarchyMap[parent] = 0
}
subjectHierarchyMap[child] = 1
}
// Use queues for levelOrder
queue := list.New()
for k, v := range subjectHierarchyMap {
root := k
if v != 0 {
continue
}
lv := 0
queue.PushBack(root)
for queue.Len() != 0 {
sz := queue.Len()
for i := 0; i < sz; i++ {
node := queue.Front()
queue.Remove(node)
nodeValue := node.Value.(string)
subjectHierarchyMap[nodeValue] = lv
if _, ok := policyMap[nodeValue]; ok {
for _, child := range policyMap[nodeValue] {
queue.PushBack(child)
}
}
}
lv++
}
}
return subjectHierarchyMap, nil
}
func getNameWithDomain(domain string, name string) string {
return domain + defaultSeparator + name
}
func (model Model) SortPoliciesByPriority() error {
for ptype, assertion := range model["p"] {
for index, token := range assertion.Tokens {
if token == fmt.Sprintf("%s_priority", ptype) {
assertion.priorityIndex = index
break
}
}
if assertion.priorityIndex == -1 {
continue
}
policies := assertion.Policy
sort.SliceStable(policies, func(i, j int) bool {
p1, err := strconv.Atoi(policies[i][assertion.priorityIndex])
if err != nil {
return true
}
p2, err := strconv.Atoi(policies[j][assertion.priorityIndex])
if err != nil {
return true
}
return p1 < p2
})
for i, policy := range assertion.Policy {
assertion.PolicyMap[strings.Join(policy, ",")] = i
}
}
return nil
}
func (model Model) ToText() string {
tokenPatterns := make(map[string]string)
pPattern, rPattern := regexp.MustCompile("^p_"), regexp.MustCompile("^r_")
for _, ptype := range []string{"r", "p"} {
for _, token := range model[ptype][ptype].Tokens {
tokenPatterns[token] = rPattern.ReplaceAllString(pPattern.ReplaceAllString(token, "p."), "r.")
}
}
if strings.Contains(model["e"]["e"].Value, "p_eft") {
tokenPatterns["p_eft"] = "p.eft"
}
s := strings.Builder{}
writeString := func(sec string) {
for ptype := range model[sec] {
value := model[sec][ptype].Value
for tokenPattern, newToken := range tokenPatterns {
value = strings.Replace(value, tokenPattern, newToken, -1)
}
s.WriteString(fmt.Sprintf("%s = %s\n", sec, value))
}
}
s.WriteString("[request_definition]\n")
writeString("r")
s.WriteString("[policy_definition]\n")
writeString("p")
if _, ok := model["g"]; ok {
s.WriteString("[role_definition]\n")
for ptype := range model["g"] {
s.WriteString(fmt.Sprintf("%s = %s\n", ptype, model["g"][ptype].Value))
}
}
s.WriteString("[policy_effect]\n")
writeString("e")
s.WriteString("[matchers]\n")
writeString("m")
return s.String()
}
func (model Model) Copy() Model {
newModel := NewModel()
for sec, m := range model {
newAstMap := make(AssertionMap)
for ptype, ast := range m {
newAstMap[ptype] = ast.copy()
}
newModel[sec] = newAstMap
}
newModel.SetLogger(model.GetLogger())
return newModel
}
|
{
return false
}
|
boom.go
|
// Copyright 2014 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
"flag"
"fmt"
"os"
"regexp"
"runtime"
"tool/boom/boomer"
)
const notice = `
************************** WARNING ********************************
This project has moved to https://github.com/rakyll/hey
Use the following command to install the new binary:
$ go get github.com/rakyll/hey
Program boom might be broken in the future; please update your
environment rather than depending on this deprecated binary.
*******************************************************************
`
var (
user = flag.String("u", "", "")
password = flag.String("p", "", "")
host = flag.String("h", "", "")
database = flag.String("d", "", "")
table = flag.String("t", "split", "")
output = flag.String("o", "", "")
c = flag.Int("c", 50, "")
n = flag.Int("n", 200, "")
q = flag.Int("q", 0, "")
l = flag.Int("l", 255, "")
mode = flag.Int("m", 1, "")
base = flag.Int("b", 1, "")
times = flag.Int("times", 1, "")
cpus = flag.Int("cpus", runtime.GOMAXPROCS(-1), "")
help = flag.Bool("help", false, "")
verify = flag.Bool("v", false, "")
vStep = flag.Bool("vstep", false, "")
vDeletePeer = flag.Uint64("vdeletepeer", 0, "")
rangeNumber = flag.Uint64("range-number", 1, "")
multiNumber = flag.Uint64("multi-number", 1, "")
clusterId = flag.Uint64("clusterid", 0, "")
clusterToken = flag.String("clustertoken", "", "")
masterAddr = flag.String("master-addr", "", "")
)
var usage = `Usage: boom [options...] <url>
Options:
-n Number of requests to run.
-c Number of requests to run concurrently. Total number of requests cannot
be smaller than the concurrency level.
-q Rate limit, in queries per second (QPS).
-o Output type. If none provided, a summary is printed.
"csv" is the only supported alternative. Dumps the response
metrics in comma-separated values format.
-u fbase user name.
-p fbase password.
-h fbase host.
-d database.
-t table.
-m mode 1: random insert; 2: order insert; 3: order select; 4: single select; 5: random select; 6: hash random insert; 7: hash random select; 8: user tp table test
-times loop times
-cpus Number of used cpu cores.
(default for current machine is %d cores)
-l data length for random.
-v verify
-vstep verify step
-vdeletepeer verify with peer delete occurring during the run, duration (sec)
-clusterid clusterid uint64
-clustertoken cluster token from master config
-master-addr ip:port
`
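// Illustrative invocation (flag values are made up): 1000 random inserts into
// table "split" using 50 concurrent workers:
//
//   boom -u root -p secret -h 127.0.0.1 -d testdb -t split -n 1000 -c 50 -m 1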
func main() {
fmt.Println(notice) // show deprecation notice
flag.Usage = func() {
fmt.Fprint(os.Stderr, fmt.Sprintf(usage, runtime.NumCPU()))
}
flag.Parse()
if *help {
usageAndExit("")
}
runtime.GOMAXPROCS(*cpus)
num := *n
conc := *c
q := *q
if num <= 0 || conc <= 0
|
if num < conc {
usageAndExit("n cannot be less than c")
}
if *output != "csv" && *output != "" {
usageAndExit("Invalid output type; only csv is supported.")
}
if len(*user) == 0 || len(*password) == 0 || len(*host) == 0 || len(*database) == 0 {
usageAndExit("Invalid mysql dns.")
}
(&boomer.Boomer{
N: num,
C: conc,
Qps: q,
DLen: *l,
Output: *output,
User: *user,
Password: *password,
Database: *database,
Table: *table,
Host: *host,
Mode: *mode,
Base: *base,
Times: *times,
Verify: *verify,
StepVerify: *vStep,
DeletePeerVerify: *vDeletePeer,
ClusterId: *clusterId,
ClusterToken: *clusterToken,
MasterAddr: *masterAddr,
RangeNumber: *rangeNumber,
MultiNumber: *multiNumber,
}).Run()
}
func usageAndExit(msg string) {
if msg != "" {
fmt.Fprint(os.Stderr, msg)
fmt.Fprintf(os.Stderr, "\n\n")
}
flag.Usage()
fmt.Fprintf(os.Stderr, "\n")
os.Exit(1)
}
func parseInputWithRegexp(input, regx string) ([]string, error) {
re := regexp.MustCompile(regx)
matches := re.FindStringSubmatch(input)
if len(matches) < 1 {
return nil, fmt.Errorf("could not parse the provided input; input = %v", input)
}
return matches, nil
}
|
{
usageAndExit("n and c cannot be smaller than 1.")
}
|
tempestmail.py
|
#! /usr/bin/env python
# Copyright Red Hat, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
import datetime
import logging
import os
import re
import requests
import smtplib
import sys
import yaml
from email.mime.text import MIMEText
from jinja2 import Environment
from jinja2 import FileSystemLoader
from six.moves.urllib.parse import urljoin
HREF = re.compile('href="([^"]+)"')
JOBRE = re.compile('[a-z0-9]{7}/')
TESTRE = re.compile(r'(tempest[^ \(\)]+|\w+\.tests\.[^ \(\)]+)')
TIMEST = re.compile(r'(\d{4}-\d{2}-\d{2} \d{2}:\d{2}):\d{2}\.\d+ \|')
TITLE = re.compile('<title>(.*?)</title>')
FAILED = "... FAILED"
OK = "... ok"
ERROR = "... ERROR"
SKIPPED = "... SKIPPED"
NLINKS = 1
def compare_tests(failures, config):
'''Split failures into those covered by known bugs and new ones'''
|
for test in config.known_failures:
if re.search(test.get('test'), fail):
covered.append({'failure': fail, 'reason': test.get('reason')})
new = [fail for fail in failures if not any(
c['failure'] == fail for c in covered)]
return covered, new
def get_html(url):
try:
resp = requests.get(url)
if resp is None:
raise TypeError
except TypeError as e:
print("Exception %s" % str(e))
return
return resp
def get_tests_results(console):
'''Get results of tests from console'''
failed = [TESTRE.search(l).group(1)
for l in console.splitlines() if FAILED in l]
ok = [TESTRE.search(l).group(1)
for l in console.splitlines() if OK in l]
errors = [TESTRE.search(l).group(1)
for l in console.splitlines() if ERROR in l]
# all_skipped = [TESTRE.search(l).group(1)
# for l in console.splitlines() if SKIPPED in l]
return failed, ok, errors
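# Illustrative console lines this parser understands (test names are made up):
#
#   tempest.api.compute.servers.test_servers.ServersTest.test_update_server ... ok
#   tempest.api.network.test_routers.RoutersTest.test_create_router ... FAILED
#
# The first lands in the `ok` list, the second in `failed`; lines ending in
# "... ERROR" are collected separately in `errors`.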
class Config(object):
pass
class Mail(object):
def __init__(self, config):
self.config = config
self.log = logging.getLogger('Mail')
self.mail_from = config.mail_from
self.username = config.username
self.password = config.password
self.smtp = config.smtp
self.require_auth = config.require_auth
self.templates_path = os.path.join(os.path.dirname(__file__),
config.templates_path)
self.template = config.template
def render_template(self, data):
self.log.debug('Rendering template')
env = Environment(loader=FileSystemLoader(self.templates_path))
env.filters['datetimeformat'] = self.datetimeformat
template = env.get_template(self.template)
return template.render(data=data)
def datetimeformat(self, value, format="%d-%m-%Y %H:%M"):
return value.strftime(format)
def filter_emails(self, job, data):
has_errors = False
bookaddr = {}
for error in [data.get(x, []) for x in ('new', 'failed', 'errors')]:
if error:
self.log.debug('There are tests with failed result')
has_errors = True
break
if has_errors:
# Check if the user is assigned for the job
# If there's no job assigned, we add the user anyway
emails = [m for m in self.config.emails if job in
m.get('jobs') or not
m.get('jobs')]
# Add all addresses except those whose regexes don't match
for email in emails:
add = True
if email.get('regex'):
for r in email.get('regex'):
if list(filter(r.search, data.get('new', []))):
break
add = False
if add:
topics = ''
if email.get('topics'):
# Parse topics and format it between brackets
t = email.get('topics').split(',')
topics = ''.join('[{}]'.format(s) for s in t)
# Add the address to the bookaddr dict
# {'[foo][bar]' : ['[email protected]', '[email protected]']}
bookaddr.setdefault(topics, []).append(email.get('mail'))
else:
self.log.debug('No failures send email to everybody')
addresses = [m.get('mail') for m in self.config.emails
if not m.get('fail_only')]
# Single group with empty topic is added to the bookaddr
bookaddr.setdefault('', []).extend(addresses)
data['has_errors'] = has_errors
return bookaddr
def _send_mail_local(self, addresses, message, subject, output):
msg = MIMEText(message, 'html')
msg['Subject'] = subject
msg['From'] = self.mail_from
msg['To'] = ",".join(addresses)
s = smtplib.SMTP(self.smtp)
if self.require_auth:
s.ehlo()
s.starttls()
s.login(self.username, self.password)
s.sendmail(self.mail_from, addresses, msg.as_string())
self.log.debug('Sending mail')
s.quit()
if output:
self.log.debug('Writing email in {}'.format(output))
with open(output, 'w') as f:
f.write(msg.as_string())
def _send_mail_api(self, addresses, message, subject):
data = {'addresses': addresses, 'message': message, 'subject': subject,
'mime_type': 'html'}
requests.post(self.config.api_server, data=data)
def send_mail(self, job, data, output):
bookaddr = self.filter_emails(job, data)
message = self.render_template(data)
# Send a separate email to the addresses grouped by topics
for topics, addresses in bookaddr.items():
subject = '{} Job {} results'.format(topics, job).lstrip()
if self.config.use_api_server:
self._send_mail_api(addresses, message, subject)
else:
self._send_mail_local(addresses, message, subject, output)
class TempestMailCmd(object):
def parse_arguments(self, args):
parser = argparse.ArgumentParser(description='tempest-mail')
parser.add_argument('-c', dest='config',
default='/etc/tempest-mail/tempest-mail.yaml',
help='Path to config file')
parser.add_argument('-l', dest='logconfig',
help='Path to log config file')
parser.add_argument('--version', dest='version',
help='Show version')
parser.add_argument('--job', dest='job',
help='Job name', required=True)
parser.add_argument('--file', dest='file',
help='File containing tempest output')
parser.add_argument('--skip-file', dest='skip_file',
help='List of skip files')
parser.add_argument('--output', dest='output',
help='Save the email content in a file')
parser.add_argument('--log-url', dest='log_url',
help='Set log url')
self.args = parser.parse_args(args)
def setup_logging(self):
self.log = logging.getLogger('tempestmail.TempestMail')
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s %(levelname)s %(name)s: '
'%(message)s')
def get_index(self):
'''Get the index page of the periodic job and return all links to jobs'''
url = urljoin(self.config.log_url, self.args.job)
res = get_html(url)
if res is None or not res.ok:
return []
body = res.content.decode() if res.content else ''
hrefs = [HREF.search(l).group(1)
for l in body.splitlines() if HREF.search(l)]
links = ["/".join((url, link))
for link in hrefs if JOBRE.match(link)]
if links:
# Number of links to return
return links[:NLINKS]
else:
return []
def get_console(self, job_url=None):
'''Get console page of job'''
if self.args.file and not job_url:
try:
with open(self.args.file) as f:
console = f.read()
except IOError:
return (None, None, None)
log_path = os.environ.get('LOG_PATH', None)
if log_path:
log_path = urljoin(getattr(
self.args, 'log_url', self.config.default_log_url), log_path)
return (console, datetime.datetime.now(), log_path)
def _good_result(res):
if res is None or int(res.status_code) not in (200, 404):
return False
else:
return True
def _get_date(c):
text = c.splitlines()
# find last line with timestamp
for l in text[::-1]:
if TIMEST.match(l):
return datetime.datetime.strptime(
TIMEST.search(l).group(1),
"%Y-%m-%d %H:%M")
return None
url = urljoin(job_url, "console.html.gz")
res = get_html(url)
if not _good_result(res):
print("Error getting console %s" % url)
# Try again
res = get_html(url)
if not _good_result(res):
return (None, None, None)
elif int(res.status_code) == 404:
url = urljoin(job_url, "console.html")
res = get_html(url)
if not _good_result(res):
# Try again
res = get_html(url)
if not _good_result(res):
print("Error getting console %s" % url)
return (None, None, None)
console = res.content.decode('utf-8')
date = _get_date(console)
return console, date, url
def get_data(self, console, date, link):
fails, ok, errors = get_tests_results(console)
d = {
'run': True,
'date': date,
'link': link,
'job': self.args.job
}
if fails or errors:
covered, new = compare_tests(fails, self.config)
d.update({
'failed': fails,
'covered': covered,
'new': new,
'errors': errors,
})
elif ok:
d['ok'] = ok
elif not fails and not ok and not errors:
d['run'] = False
return d
def load_skip_file(self, skipfile):
known_failures = []
try:
skip = yaml.safe_load(open(skipfile))
except yaml.constructor.ConstructorError:
self.log.error('Invalid yaml file {}'.format(self.args.skip_file))
else:
for t in skip.get('known_failures'):
known_failures.append({'test': t.get('test'),
'reason': t.get('reason')})
return known_failures
def checkJobs(self):
data = []
if self.args.file:
console, date, link = self.get_console()
link = link or ''
d = self.get_data(console, date, link)
data.append(d)
else:
index = self.get_index()
for run in index:
console, date, link = self.get_console(run)
if not console or not date:
continue
link = link or ''
d = self.get_data(console, date, link)
data.append(d)
data = sorted(data, key=lambda x: x['date'])
last = data[-1]
send_mail = Mail(self.config)
send_mail.send_mail(self.args.job, last, self.args.output)
def setupConfig(self):
self.log.debug("Loading configuration")
try:
config = yaml.safe_load(open(self.args.config))
except yaml.constructor.ConstructorError:
self.log.error('Invalid yaml file {}'.format(self.args.config))
newconfig = Config()
known_failures = []
newconfig.emails = []
newconfig.username = config.get('mail_username', '')
newconfig.password = config.get('mail_password', '')
newconfig.mail_from = config.get('mail_from', '')
newconfig.smtp = config.get('smtp_server', '')
newconfig.templates_path = config.get('templates_path')
newconfig.template = config.get('template')
newconfig.log_url = config.get('log_url')
newconfig.require_auth = config.get('require_auth', False)
newconfig.default_log_url = config.get('default_log_url',
'http://logs.openstack.org')
for e in config.get('emails'):
regex = [re.compile(r) for r in e.get('regex', [])]
newconfig.emails.append({'name': e.get('name'),
'mail': e.get('mail'),
'jobs': e.get('jobs', []),
'regex': regex,
'topics': e.get('topics'),
'fail_only': e.get('fail_only', False)})
for t in config.get('known_failures', []):
known_failures.append({'test': t.get('test'),
'reason': t.get('reason')})
if self.args.skip_file:
known_failures = (
known_failures + self.load_skip_file(self.args.skip_file))
newconfig.known_failures = known_failures
newconfig.api_server = config.get('api_server')
newconfig.use_api_server = config.get('use_api_server', False)
self.config = newconfig
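# Illustrative config file consumed by setupConfig (keys taken from the reads
# above; all values are made up):
#
#   mail_from: tempest-mail@example.com
#   smtp_server: localhost
#   require_auth: false
#   templates_path: templates
#   template: report.html
#   log_url: http://logs.openstack.org/periodic
#   emails:
#     - name: Jane
#       mail: jane@example.com
#       jobs: [periodic-tempest-job]
#       fail_only: true
#   known_failures:
#     - test: tempest.api.compute.test_flaky
#       reason: known bug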
def main():
tmc = TempestMailCmd()
tmc.parse_arguments(sys.argv[1:])
tmc.setup_logging()
tmc.setupConfig()
tmc.checkJobs()
if __name__ == '__main__':
sys.exit(main())
|
covered, new = [], []
for fail in failures:
|
messenger-theme-flat.js
|
/*
* IDCopier
*
* Copyright (C) 2017 Black Duck Software, Inc.
* http://www.blackducksoftware.com/
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
(function() {
var $, FlatMessage, spinner_template,
__hasProp = {}.hasOwnProperty,
__extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; };
|
FlatMessage = (function(_super) {
__extends(FlatMessage, _super);
function FlatMessage() {
return FlatMessage.__super__.constructor.apply(this, arguments);
}
FlatMessage.prototype.template = function(opts) {
var $message;
$message = FlatMessage.__super__.template.apply(this, arguments);
$message.append($(spinner_template));
return $message;
};
return FlatMessage;
})(window.Messenger.Message);
window.Messenger.themes.flat = {
Message: FlatMessage
};
}).call(this);
|
$ = jQuery;
spinner_template = '<div class="messenger-spinner">\n <span class="messenger-spinner-side messenger-spinner-side-left">\n <span class="messenger-spinner-fill"></span>\n </span>\n <span class="messenger-spinner-side messenger-spinner-side-right">\n <span class="messenger-spinner-fill"></span>\n </span>\n</div>';
|
resume_subscription_amendment.py
|
# coding: utf-8
"""
BillForward REST API
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from pprint import pformat
from six import iteritems
import re
class ResumeSubscriptionAmendment(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, created=None, changed_by=None, updated=None, type=None, id=None, organization_id=None, subscription_id=None, amendment_type=None, actioning_time=None, actioned_time=None, state=None, deleted=False):
"""
ResumeSubscriptionAmendment - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'created': 'datetime',
'changed_by': 'str',
'updated': 'datetime',
'type': 'str',
'id': 'str',
'organization_id': 'str',
'subscription_id': 'str',
'amendment_type': 'str',
'actioning_time': 'datetime',
'actioned_time': 'datetime',
'state': 'str',
'deleted': 'bool'
}
self.attribute_map = {
'created': 'created',
'changed_by': 'changedBy',
'updated': 'updated',
'type': '@type',
'id': 'id',
'organization_id': 'organizationID',
'subscription_id': 'subscriptionID',
'amendment_type': 'amendmentType',
'actioning_time': 'actioningTime',
'actioned_time': 'actionedTime',
'state': 'state',
'deleted': 'deleted'
}
self._created = created
self._changed_by = changed_by
self._updated = updated
self._type = type
self._id = id
self._organization_id = organization_id
self._subscription_id = subscription_id
self._amendment_type = amendment_type
self._actioning_time = actioning_time
self._actioned_time = actioned_time
self._state = state
self._deleted = deleted
@property
def created(self):
"""
Gets the created of this ResumeSubscriptionAmendment.
{ \"description\" : \"The UTC DateTime when the object was created.\", \"verbs\":[] }
:return: The created of this ResumeSubscriptionAmendment.
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""
Sets the created of this ResumeSubscriptionAmendment.
{ \"description\" : \"The UTC DateTime when the object was created.\", \"verbs\":[] }
:param created: The created of this ResumeSubscriptionAmendment.
:type: datetime
"""
self._created = created
@property
def changed_by(self):
"""
Gets the changed_by of this ResumeSubscriptionAmendment.
{ \"description\" : \"ID of the user who last updated the entity.\", \"verbs\":[] }
:return: The changed_by of this ResumeSubscriptionAmendment.
:rtype: str
"""
return self._changed_by
@changed_by.setter
def changed_by(self, changed_by):
"""
Sets the changed_by of this ResumeSubscriptionAmendment.
{ \"description\" : \"ID of the user who last updated the entity.\", \"verbs\":[] }
:param changed_by: The changed_by of this ResumeSubscriptionAmendment.
:type: str
"""
self._changed_by = changed_by
@property
def updated(self):
"""
Gets the updated of this ResumeSubscriptionAmendment.
{ \"description\" : \"The UTC DateTime when the object was last updated.\", \"verbs\":[] }
:return: The updated of this ResumeSubscriptionAmendment.
:rtype: datetime
"""
return self._updated
@updated.setter
def updated(self, updated):
"""
Sets the updated of this ResumeSubscriptionAmendment.
{ \"description\" : \"The UTC DateTime when the object was last updated.\", \"verbs\":[] }
:param updated: The updated of this ResumeSubscriptionAmendment.
:type: datetime
"""
self._updated = updated
@property
def type(self):
"""
Gets the type of this ResumeSubscriptionAmendment.
{ \"description\" : \"\", \"default\" : \"\", \"verbs\":[\"POST\",\"GET\"] }
:return: The type of this ResumeSubscriptionAmendment.
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""
Sets the type of this ResumeSubscriptionAmendment.
{ \"description\" : \"\", \"default\" : \"\", \"verbs\":[\"POST\",\"GET\"] }
:param type: The type of this ResumeSubscriptionAmendment.
:type: str
"""
allowed_values = ["InvoiceOutstandingChargesAmendment", "IssueInvoiceAmendment", "PricingComponentValueAmendment", "InvoiceRecalculationAmendment", "CancellationAmendment", "InvoiceNextExecutionAttemptAmendment", "FixedTermExpiryAmendment", "EndTrialAmendment", "ProductRatePlanMigrationAmendment", "AmendmentDiscardAmendment", "UpdateComponentValueAmendment", "ServiceEndAmendment", "ResumeSubscriptionAmendment", "CreateSubscriptionChargeAmendment", "TimerAmendment"]
if type not in allowed_values:
raise ValueError(
"Invalid value for `type` ({0}), must be one of {1}"
.format(type, allowed_values)
)
self._type = type
@property
def id(self):
"""
Gets the id of this ResumeSubscriptionAmendment.
{ \"description\" : \"\", \"verbs\":[\"GET\"] }
:return: The id of this ResumeSubscriptionAmendment.
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""
Sets the id of this ResumeSubscriptionAmendment.
{ \"description\" : \"\", \"verbs\":[\"GET\"] }
:param id: The id of this ResumeSubscriptionAmendment.
:type: str
"""
self._id = id
@property
def organization_id(self):
"""
Gets the organization_id of this ResumeSubscriptionAmendment.
{ \"description\" : \"\", \"verbs\":[\"\"] }
:return: The organization_id of this ResumeSubscriptionAmendment.
:rtype: str
"""
return self._organization_id
@organization_id.setter
def organization_id(self, organization_id):
"""
Sets the organization_id of this ResumeSubscriptionAmendment.
{ \"description\" : \"\", \"verbs\":[\"\"] }
:param organization_id: The organization_id of this ResumeSubscriptionAmendment.
:type: str
"""
self._organization_id = organization_id
@property
def subscription_id(self):
"""
Gets the subscription_id of this ResumeSubscriptionAmendment.
{ \"description\" : \"\", \"verbs\":[\"POST\",\"PUT\",\"GET\"] }
:return: The subscription_id of this ResumeSubscriptionAmendment.
:rtype: str
"""
return self._subscription_id
@subscription_id.setter
def subscription_id(self, subscription_id):
"""
Sets the subscription_id of this ResumeSubscriptionAmendment.
{ \"description\" : \"\", \"verbs\":[\"POST\",\"PUT\",\"GET\"] }
:param subscription_id: The subscription_id of this ResumeSubscriptionAmendment.
:type: str
"""
self._subscription_id = subscription_id
@property
def amendment_type(self):
"""
Gets the amendment_type of this ResumeSubscriptionAmendment.
{ \"description\" : \"Type of amendment\", \"verbs\":[\"POST\",\"GET\"] }
:return: The amendment_type of this ResumeSubscriptionAmendment.
:rtype: str
"""
return self._amendment_type
@amendment_type.setter
def amendment_type(self, amendment_type):
"""
Sets the amendment_type of this ResumeSubscriptionAmendment.
{ \"description\" : \"Type of amendment\", \"verbs\":[\"POST\",\"GET\"] }
:param amendment_type: The amendment_type of this ResumeSubscriptionAmendment.
:type: str
"""
allowed_values = ["InvoiceNextExecutionAttempt", "Cancellation", "PricingComponentValue", "AmendmentDiscard", "Compound", "FixedTermExpiry", "InvoiceRecalculation", "EndTrial", "InvoiceOutstandingCharges", "IssueInvoice", "ProductRatePlanMigration", "UpdateComponentValue", "ServiceEnd", "ResumeSubscription", "CreateSubscriptionCharge", "Timer"]
if amendment_type not in allowed_values:
raise ValueError(
"Invalid value for `amendment_type` ({0}), must be one of {1}"
.format(amendment_type, allowed_values)
)
self._amendment_type = amendment_type
@property
def actioning_time(self):
"""
Gets the actioning_time of this ResumeSubscriptionAmendment.
{ \"description\" : \"When the amendment will run\", \"verbs\":[\"POST\",\"PUT\",\"GET\"] }
:return: The actioning_time of this ResumeSubscriptionAmendment.
:rtype: datetime
"""
return self._actioning_time
@actioning_time.setter
def actioning_time(self, actioning_time):
"""
Sets the actioning_time of this ResumeSubscriptionAmendment.
{ \"description\" : \"When the amendment will run\", \"verbs\":[\"POST\",\"PUT\",\"GET\"] }
:param actioning_time: The actioning_time of this ResumeSubscriptionAmendment.
:type: datetime
"""
self._actioning_time = actioning_time
@property
def actioned_time(self):
"""
Gets the actioned_time of this ResumeSubscriptionAmendment.
{ \"description\" : \"The time the amendment completed.\", \"verbs\":[\"GET\"] }
:return: The actioned_time of this ResumeSubscriptionAmendment.
:rtype: datetime
"""
return self._actioned_time
@actioned_time.setter
def actioned_time(self, actioned_time):
"""
Sets the actioned_time of this ResumeSubscriptionAmendment.
{ \"description\" : \"The time the amendment completed.\", \"verbs\":[\"GET\"] }
:param actioned_time: The actioned_time of this ResumeSubscriptionAmendment.
:type: datetime
"""
self._actioned_time = actioned_time
@property
def state(self):
"""
Gets the state of this ResumeSubscriptionAmendment.
Whether the subscription-amendment is: pending (to be actioned in the future), succeeded (actioning completed), failed (actioning was attempted but no effect was made) or discarded (the amendment had been cancelled before being actioned). Default: Pending
:return: The state of this ResumeSubscriptionAmendment.
:rtype: str
"""
return self._state
@state.setter
def state(self, state):
"""
Sets the state of this ResumeSubscriptionAmendment.
Whether the subscription-amendment is: pending (to be actioned in the future), succeeded (actioning completed), failed (actioning was attempted but no effect was made) or discarded (the amendment had been cancelled before being actioned). Default: Pending
:param state: The state of this ResumeSubscriptionAmendment.
:type: str
"""
allowed_values = ["Pending", "Succeeded", "Failed", "Discarded"]
if state not in allowed_values:
raise ValueError(
"Invalid value for `state` ({0}), must be one of {1}"
.format(state, allowed_values)
)
self._state = state
@property
def deleted(self):
"""
Gets the deleted of this ResumeSubscriptionAmendment.
{ \"description\" : \"Is the amendment deleted.\", \"verbs\":[\"GET\"] }
:return: The deleted of this ResumeSubscriptionAmendment.
:rtype: bool
"""
return self._deleted
@deleted.setter
def deleted(self, deleted):
"""
Sets the deleted of this ResumeSubscriptionAmendment.
{ \"description\" : \"Is the amendment deleted.\", \"verbs\":[\"GET\"] }
:param deleted: The deleted of this ResumeSubscriptionAmendment.
:type: bool
"""
self._deleted = deleted
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
|
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
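# Illustrative usage of the generated model (field values are made up):
#
#   amendment = ResumeSubscriptionAmendment(subscription_id='SUB-123', state='Pending')
#   print(amendment.to_dict())
#   print(amendment)  # __repr__ delegates to to_str()/pformat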
|
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
|
init.go
|
// *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package databricks
import (
"fmt"
"github.com/blang/semver"
"github.com/pulumi/pulumi-azure-nextgen/sdk/go/azure"
"github.com/pulumi/pulumi/sdk/v2/go/pulumi"
)
type module struct {
version semver.Version
}
|
return m.version
}
func (m *module) Construct(ctx *pulumi.Context, name, typ, urn string) (r pulumi.Resource, err error) {
switch typ {
case "azure-nextgen:databricks:Workspace":
r, err = NewWorkspace(ctx, name, nil, pulumi.URN_(urn))
case "azure-nextgen:databricks:vNetPeering":
r, err = NewVNetPeering(ctx, name, nil, pulumi.URN_(urn))
default:
return nil, fmt.Errorf("unknown resource type: %s", typ)
}
return
}
func init() {
version, err := azure.PkgVersion()
if err != nil {
fmt.Println("failed to determine package version. defaulting to v1: %v", err)
}
pulumi.RegisterResourceModule(
"azure-nextgen",
"databricks",
&module{version},
)
}
|
func (m *module) Version() semver.Version {
|